diff --git a/.env b/.env
new file mode 100644
index 00000000000..e3ececc2e54
--- /dev/null
+++ b/.env
@@ -0,0 +1,4 @@
+APP_IMAGE=gdcc/dataverse:unstable
+POSTGRES_VERSION=13
+DATAVERSE_DB_USER=dataverse
+SOLR_VERSION=9.3.0
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000000..9860024f70a
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,4 @@
+# https://www.git-scm.com/docs/gitattributes
+
+# This sets mandatory LF line endings for .sh files, preventing Windows users from having to change the value of their git config --global core.autocrlf to 'false' or 'input'
+*.sh text eol=lf
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index b297dfc4ee8..7e6995d76d9 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -3,7 +3,7 @@ name: Bug report
 about: Did you encounter something unexpected or incorrect in the Dataverse software?
   We'd like to hear about it!
 title: ''
-labels: ''
+labels: 'Type: Bug'
 assignees: ''
 
 ---
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 7d5e0deea05..d6248537418 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea or new feature for the Dataverse software!
 title: 'Feature Request/Idea:'
-labels: ''
+labels: 'Type: Feature'
 assignees: ''
 
 ---
diff --git a/.github/workflows/container_app_pr.yml b/.github/workflows/container_app_pr.yml
new file mode 100644
index 00000000000..c86d284e74b
--- /dev/null
+++ b/.github/workflows/container_app_pr.yml
@@ -0,0 +1,96 @@
+---
+name: Preview Application Container Image
+
+on:
+    # We only run the push commands if we are asked to by an issue comment with the correct command.
+    # This workflow is always taken from the default branch and runs in repo context with access to secrets.
+    repository_dispatch:
+        types: [ push-image-command ]
+
+env:
+    IMAGE_TAG: unstable
+    BASE_IMAGE_TAG: unstable
+    PLATFORMS: "linux/amd64,linux/arm64"
+
+jobs:
+    deploy:
+        name: "Package & Push"
+        runs-on: ubuntu-latest
+        # Only run in upstream repo - avoid unnecessary runs in forks
+        if: ${{ github.repository_owner == 'IQSS' }}
+        steps:
+            # Check out the pull request code as if it were merged
+            - uses: actions/checkout@v3
+              with:
+                  ref: 'refs/pull/${{ github.event.client_payload.pull_request.number }}/merge'
+            - uses: actions/setup-java@v3
+              with:
+                  java-version: "17"
+                  distribution: 'adopt'
+            - uses: actions/cache@v3
+              with:
+                  path: ~/.m2
+                  key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+                  restore-keys: ${{ runner.os }}-m2
+
+            # Note: Accessing and pushing tags etc. to GHCR will only succeed in the upstream repo because of secrets access.
+            - name: Login to Github Container Registry
+              uses: docker/login-action@v2
+              with:
+                  registry: ghcr.io
+                  username: ${{ secrets.GHCR_USERNAME }}
+                  password: ${{ secrets.GHCR_TOKEN }}
+
+            - name: Set up QEMU for multi-arch builds
+              uses: docker/setup-qemu-action@v2
+
+            # Get the image tag from either the command or default to branch name (Not used for now)
+            #-   name: Get the target tag name
+            #    id: vars
+            #    run: |
+            #        tag=${{ github.event.client_payload.slash_command.args.named.tag }}
+            #        if [[ -z "$tag" ]]; then tag=$(echo "${{ github.event.client_payload.pull_request.head.ref }}" | tr '\\/_:&+,;#*' '-'); fi
+            #        echo "IMAGE_TAG=$tag" >> $GITHUB_ENV
+
+            # Set image tag to branch name of the PR
+            - name: Set image tag to branch name
+              run: |
+                  echo "IMAGE_TAG=$(echo "${{ github.event.client_payload.pull_request.head.ref }}" | tr '\\/_:&+,;#*' '-')" >> $GITHUB_ENV
+
+            # Necessary to split into two steps, as otherwise the submodules are not available (deploy skips install)
+            - name: Build app and configbaker container image with local architecture and submodules (profile will skip tests)
+              run: >
+                  mvn -B -f modules/dataverse-parent
+                  -P ct -pl edu.harvard.iq:dataverse -am
+                  install
+            - name: Deploy multi-arch application and configbaker container image
+              run: >
+                  mvn 
+                  -Dapp.image.tag=${{ env.IMAGE_TAG }} -Dbase.image.tag=${{ env.BASE_IMAGE_TAG }}
+                  -Ddocker.registry=ghcr.io -Ddocker.platforms=${{ env.PLATFORMS }}
+                  -Pct deploy
+
+            - uses: marocchino/sticky-pull-request-comment@v2
+              with:
+                  header: registry-push
+                  hide_and_recreate: true
+                  hide_classify: "OUTDATED"
+                  number: ${{ github.event.client_payload.pull_request.number }}
+                  message: |
+                      :package: Pushed preview images as
+                      ```
+                      ghcr.io/gdcc/dataverse:${{ env.IMAGE_TAG }}
+                      ```
+                      ```
+                      ghcr.io/gdcc/configbaker:${{ env.IMAGE_TAG }}
+                      ```
+                      :ship: [See on GHCR](https://github.com/orgs/gdcc/packages/container). Use by referencing the full name as printed above; mind the registry name.
+
+            # Leave a note when things have gone sideways
+            - uses: peter-evans/create-or-update-comment@v3
+              if: ${{ failure() }}
+              with:
+                  issue-number: ${{ github.event.client_payload.pull_request.number }}
+                  body: >
+                      :package: Could not push preview images :disappointed:.
+                      See [log](https://github.com/IQSS/dataverse/actions/runs/${{ github.run_id }}) for details.
diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml
new file mode 100644
index 00000000000..b3e247e376c
--- /dev/null
+++ b/.github/workflows/container_app_push.yml
@@ -0,0 +1,171 @@
+---
+name: Application Container Image
+
+on:
+    # We are deliberately *not* running on push events here to avoid double runs.
+    # Instead, push events will trigger from the base image and maven unit tests via workflow_call.
+    workflow_call:
+    pull_request:
+        branches:
+            - develop
+            - master
+        paths:
+            - 'src/main/docker/**'
+            - 'modules/container-configbaker/**'
+            - '.github/workflows/container_app_push.yml'
+
+env:
+    IMAGE_TAG: unstable
+    BASE_IMAGE_TAG: unstable
+    REGISTRY: "" # Empty means default to Docker Hub
+    PLATFORMS: "linux/amd64,linux/arm64"
+    MASTER_BRANCH_TAG: alpha
+
+jobs:
+    build:
+        name: "Build & Test"
+        runs-on: ubuntu-latest
+        permissions:
+            contents: read
+            packages: write
+            pull-requests: write
+        # Only run in upstream repo - avoid unnecessary runs in forks
+        if: ${{ github.repository_owner == 'IQSS' }}
+
+        steps:
+            - name: Checkout repository
+              uses: actions/checkout@v3
+
+            - name: Set up JDK
+              uses: actions/setup-java@v3
+              with:
+                  java-version: "17"
+                  distribution: temurin
+                  cache: maven
+
+            - name: Build app and configbaker container image with local architecture and submodules (profile will skip tests)
+              run: >
+                  mvn -B -f modules/dataverse-parent
+                  -P ct -pl edu.harvard.iq:dataverse -am
+                  install
+
+            # TODO: add smoke / integration testing here (add "-Pct -DskipIntegrationTests=false")
+
+    hub-description:
+        needs: build
+        name: Push image descriptions to Docker Hub
+        # Run this when triggered via push or schedule, i.e. as a workflow reused from the base image / maven unit test workflows.
+        # Excluding PRs here means we will have no trouble with secrets access. Also avoid runs in forks.
+        if: ${{ github.event_name != 'pull_request' && github.ref_name == 'develop' && github.repository_owner == 'IQSS' }}
+        runs-on: ubuntu-latest
+        steps:
+            - uses: actions/checkout@v3
+            - uses: peter-evans/dockerhub-description@v3
+              with:
+                  username: ${{ secrets.DOCKERHUB_USERNAME }}
+                  password: ${{ secrets.DOCKERHUB_TOKEN }}
+                  repository: gdcc/dataverse
+                  short-description: "Dataverse Application Container Image providing the executable"
+                  readme-filepath: ./src/main/docker/README.md
+            - uses: peter-evans/dockerhub-description@v3
+              with:
+                  username: ${{ secrets.DOCKERHUB_USERNAME }}
+                  password: ${{ secrets.DOCKERHUB_TOKEN }}
+                  repository: gdcc/configbaker
+                  short-description: "Dataverse Config Baker Container Image providing setup tooling and more"
+                  readme-filepath: ./modules/container-configbaker/README.md
+
+    # Note: Accessing and pushing tags etc. to DockerHub or GHCR will only succeed in the upstream repo because of secrets access.
+    # We check for them here, and subsequent jobs can rely on this to decide whether they shall run.
+    check-secrets:
+        needs: build
+        name: Check for Secrets Availability
+        runs-on: ubuntu-latest
+        outputs:
+            available: ${{ steps.secret-check.outputs.available }}
+        steps:
+            - id: secret-check
+              # perform secret check & put boolean result as an output
+              shell: bash
+              run: |
+                  if [ "${{ secrets.DOCKERHUB_TOKEN }}" != '' ]; then
+                      echo "available=true" >> $GITHUB_OUTPUT;
+                  else
+                      echo "available=false" >> $GITHUB_OUTPUT;
+                  fi
+
+    deploy:
+        needs: check-secrets
+        name: "Package & Publish"
+        runs-on: ubuntu-latest
+        # Only run this job if we have access to secrets. This is true for events like push/schedule, which run in the
+        # context of the main repo, but for PRs it is only true if they come from the main repo! Forks have no secret access.
+        #
+        # Note: The team's decision was to not auto-deploy an image on any git push where no PR exists (yet).
+        #       Accordingly, only run for push events on branches develop and master.
+        if: needs.check-secrets.outputs.available == 'true' &&
+            ( github.event_name != 'push' || ( github.event_name == 'push' && contains(fromJSON('["develop", "master"]'), github.ref_name)))
+        steps:
+            - uses: actions/checkout@v3
+            - uses: actions/setup-java@v3
+              with:
+                  java-version: "17"
+                  distribution: temurin
+
+            # Depending on context, we push to different targets. Login accordingly.
+            - if: github.event_name != 'pull_request'
+              name: Log in to Docker Hub registry
+              uses: docker/login-action@v2
+              with:
+                  username: ${{ secrets.DOCKERHUB_USERNAME }}
+                  password: ${{ secrets.DOCKERHUB_TOKEN }}
+            - if: ${{ github.event_name == 'pull_request' }}
+              name: Login to Github Container Registry
+              uses: docker/login-action@v2
+              with:
+                  registry: ghcr.io
+                  username: ${{ secrets.GHCR_USERNAME }}
+                  password: ${{ secrets.GHCR_TOKEN }}
+
+            - name: Set up QEMU for multi-arch builds
+              uses: docker/setup-qemu-action@v2
+
+            - name: Re-set image tag based on branch (if master)
+              if: ${{ github.ref_name == 'master' }}
+              run: |
+                  echo "IMAGE_TAG=${{ env.MASTER_BRANCH_TAG }}" >> $GITHUB_ENV
+                  echo "BASE_IMAGE_TAG=${{ env.MASTER_BRANCH_TAG }}" >> $GITHUB_ENV
+            - name: Re-set image tag and container registry when on PR
+              if: ${{ github.event_name == 'pull_request' }}
+              run: |
+                  echo "IMAGE_TAG=$(echo "$GITHUB_HEAD_REF" | tr '\\/_:&+,;#*' '-')" >> $GITHUB_ENV
+                  echo "REGISTRY='-Ddocker.registry=ghcr.io'" >> $GITHUB_ENV
+
+            # Necessary to split into two steps, as otherwise the submodules are not available (deploy skips install)
+            - name: Build app and configbaker container image with local architecture and submodules (profile will skip tests)
+              run: >
+                  mvn -B -f modules/dataverse-parent
+                  -P ct -pl edu.harvard.iq:dataverse -am
+                  install
+            - name: Deploy multi-arch application and configbaker container image
+              run: >
+                  mvn 
+                  -Dapp.image.tag=${{ env.IMAGE_TAG }} -Dbase.image.tag=${{ env.BASE_IMAGE_TAG }}
+                  ${{ env.REGISTRY }} -Ddocker.platforms=${{ env.PLATFORMS }}
+                  -P ct deploy
+
+            - uses: marocchino/sticky-pull-request-comment@v2
+              if: ${{ github.event_name == 'pull_request' }}
+              with:
+                  header: registry-push
+                  hide_and_recreate: true
+                  hide_classify: "OUTDATED"
+                  message: |
+                      :package: Pushed preview images as
+                      ```
+                      ghcr.io/gdcc/dataverse:${{ env.IMAGE_TAG }}
+                      ```
+                      ```
+                      ghcr.io/gdcc/configbaker:${{ env.IMAGE_TAG }}
+                      ```
+                      :ship: [See on GHCR](https://github.com/orgs/gdcc/packages/container). Use by referencing the full name as printed above; mind the registry name.
diff --git a/.github/workflows/container_base_push.yml b/.github/workflows/container_base_push.yml
index 8f440151d0c..b938851f816 100644
--- a/.github/workflows/container_base_push.yml
+++ b/.github/workflows/container_base_push.yml
@@ -1,5 +1,5 @@
 ---
-name: Container Base Module
+name: Base Container Image
 
 on:
     push:
@@ -18,9 +18,12 @@ on:
             - 'modules/container-base/**'
             - 'modules/dataverse-parent/pom.xml'
             - '.github/workflows/container_base_push.yml'
+    schedule:
+        - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC
 
 env:
     IMAGE_TAG: unstable
+    PLATFORMS: linux/amd64,linux/arm64
 
 jobs:
     build:
@@ -31,7 +34,7 @@ jobs:
             packages: read
         strategy:
             matrix:
-                jdk: [ '11' ]
+                jdk: [ '17' ]
         # Only run in upstream repo - avoid unnecessary runs in forks
         if: ${{ github.repository_owner == 'IQSS' }}
 
@@ -79,7 +82,18 @@ jobs:
               uses: docker/setup-qemu-action@v2
             - name: Re-set image tag based on branch
               if: ${{ github.ref_name == 'master' }}
-              run: echo "IMAGE_TAG=stable"
+              run: echo "IMAGE_TAG=alpha" >> $GITHUB_ENV
             - if: ${{ github.event_name != 'pull_request' }}
               name: Deploy multi-arch base container image to Docker Hub
-              run: mvn -f modules/container-base -Pct deploy -Dbase.image.tag=${{ env.IMAGE_TAG }}
+              run: mvn -f modules/container-base -Pct deploy -Dbase.image.tag=${{ env.IMAGE_TAG }} -Ddocker.platforms=${{ env.PLATFORMS }}
+    push-app-img:
+        name: "Rebase & Publish App Image"
+        permissions:
+            contents: read
+            packages: write
+            pull-requests: write
+        needs: build
+        # We do not release a new base image for pull requests, so do not trigger.
+        if: ${{ github.event_name != 'pull_request' }}
+        uses: ./.github/workflows/container_app_push.yml
+        secrets: inherit
diff --git a/.github/workflows/cypress_ui.yml.future b/.github/workflows/cypress_ui.yml.future
index b38ae2f9558..0823233fdeb 100644
--- a/.github/workflows/cypress_ui.yml.future
+++ b/.github/workflows/cypress_ui.yml.future
@@ -2,6 +2,7 @@
 #
 #   THIS IS AN OLD TRAVIS-CI.ORG JOB FILE
 #   To be used with Github Actions, it would be necessary to refactor it.
+#   In addition, it needs to be rewritten to use our modern containers.
 #   Keeping it as the future example it has been before.
 #   See also #5846
 #
@@ -30,8 +31,6 @@ jobs:
         directories:
           # we also need to cache folder with Cypress binary
           - ~/.cache
-          # we want to cache the Glassfish and Solr dependencies as well
-          - conf/docker-aio/dv/deps
       before_install:
         - cd tests
       install:
diff --git a/.github/workflows/deploy_beta_testing.yml b/.github/workflows/deploy_beta_testing.yml
new file mode 100644
index 00000000000..028f0140cc9
--- /dev/null
+++ b/.github/workflows/deploy_beta_testing.yml
@@ -0,0 +1,85 @@
+name: 'Deploy to Beta Testing'
+
+on:
+  push:
+    branches:
+      - develop
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - uses: actions/setup-java@v3
+        with:
+          distribution: 'zulu'
+          java-version: '17'
+
+      - name: Enable API Session Auth feature flag
+        working-directory: src/main/resources/META-INF
+        run: echo -e "dataverse.feature.api-session-auth=true" >> microprofile-config.properties
+
+      - name: Set build number
+        run: scripts/installer/custom-build-number
+
+      - name: Build application war
+        run: mvn package
+
+      - name: Get war file name
+        working-directory: target
+        run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV
+
+      - name: Upload war artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: built-app
+          path: ./target/${{ env.war_file }}
+
+  deploy-to-payara:
+    needs: build
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Download war artifact
+        uses: actions/download-artifact@v3
+        with:
+          name: built-app
+          path: ./
+
+      - name: Get war file name
+        run: echo "war_file=$(ls *.war | head -1)">> $GITHUB_ENV
+
+      - name: Copy war file to remote instance
+        uses: appleboy/scp-action@master
+        with:
+          host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }}
+          username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}
+          key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }}
+          source: './${{ env.war_file }}'
+          target: '/home/${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}'
+          overwrite: true
+
+      - name: Execute payara war deployment remotely
+        uses: appleboy/ssh-action@v1.0.0
+        env:
+          INPUT_WAR_FILE: ${{ env.war_file }}
+        with:
+          host: ${{ secrets.BETA_PAYARA_INSTANCE_HOST }}
+          username: ${{ secrets.BETA_PAYARA_INSTANCE_USERNAME }}
+          key: ${{ secrets.BETA_PAYARA_INSTANCE_SSH_PRIVATE_KEY }}
+          envs: INPUT_WAR_FILE
+          script: |
+            APPLICATION_NAME=dataverse-backend
+            ASADMIN='/usr/local/payara6/bin/asadmin --user admin'
+            $ASADMIN undeploy $APPLICATION_NAME
+            $ASADMIN stop-domain
+            rm -rf /usr/local/payara6/glassfish/domains/domain1/generated
+            rm -rf /usr/local/payara6/glassfish/domains/domain1/osgi-cache
+            $ASADMIN start-domain
+            $ASADMIN deploy --name $APPLICATION_NAME $INPUT_WAR_FILE
+            $ASADMIN stop-domain
+            $ASADMIN start-domain
diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml
index e2048f73431..a70c55fc31d 100644
--- a/.github/workflows/maven_unit_test.yml
+++ b/.github/workflows/maven_unit_test.yml
@@ -1,4 +1,4 @@
-name: Maven Unit Tests
+name: Maven Tests
 
 on:
     push:
@@ -6,11 +6,15 @@ on:
             - "**.java"
             - "pom.xml"
             - "modules/**/pom.xml"
+            - "!modules/container-base/**"
+            - "!modules/dataverse-spi/**"
     pull_request:
         paths:
             - "**.java"
             - "pom.xml"
             - "modules/**/pom.xml"
+            - "!modules/container-base/**"
+            - "!modules/dataverse-spi/**"
 
 jobs:
     unittest:
@@ -18,9 +22,66 @@ jobs:
         strategy:
             fail-fast: false
             matrix:
-                jdk: [ '11' ]
+                jdk: [ '17' ]
                 experimental: [false]
                 status:  ["Stable"]
+        continue-on-error: ${{ matrix.experimental }}
+        runs-on: ubuntu-latest
+        steps:
+          # Basic setup chores
+          - uses: actions/checkout@v3
+          - name: Set up JDK ${{ matrix.jdk }}
+            uses: actions/setup-java@v3
+            with:
+                java-version: ${{ matrix.jdk }}
+                distribution: temurin
+                cache: maven
+
+          # The reason why we use "install" here is that we want the submodules to be available in the next step.
+          # Also, we can cache them this way for jobs triggered by this one. We need to skip ITs here, as we run
+          # them in the next job - but install usually runs through the verify phase.
+          - name: Build with Maven and run unit tests
+            run: > 
+                mvn -B -f modules/dataverse-parent
+                -Dtarget.java.version=${{ matrix.jdk }}
+                -DcompilerArgument=-Xlint:unchecked -P all-unit-tests
+                -DskipIntegrationTests
+                -pl edu.harvard.iq:dataverse -am
+                install
+
+          # We don't want to cache the WAR file, so delete it
+          - run: rm -rf ~/.m2/repository/edu/harvard/iq/dataverse
+
+          # Upload the built war file. For download, it will be wrapped in a ZIP by GitHub.
+          # See also https://github.com/actions/upload-artifact#zipped-artifact-downloads
+          - uses: actions/upload-artifact@v3
+            with:
+                name: dataverse-java${{ matrix.jdk }}.war
+                path: target/dataverse*.war
+                retention-days: 7
+
+          # Store the build for the next step (integration test) to avoid recompilation and to transfer coverage reports
+          - run: |
+                tar -cvf java-builddir.tar target
+                tar -cvf java-m2-selection.tar ~/.m2/repository/io/gdcc/dataverse-*
+          - uses: actions/upload-artifact@v3
+            with:
+                name: java-artifacts
+                path: |
+                    java-builddir.tar
+                    java-m2-selection.tar
+                retention-days: 3
+
+    integration-test:
+        runs-on: ubuntu-latest
+        needs: unittest
+        name: (${{ matrix.status}} / JDK ${{ matrix.jdk }}) Integration Tests
+        strategy:
+            fail-fast: false
+            matrix:
+                jdk: [ '17' ]
+                experimental: [ false ]
+                status: [ "Stable" ]
                 #
                 # JDK 17 builds disabled due to non-essential fails marking CI jobs as completely failed within
                 # Github Projects, PR lists etc. This was consensus on Slack #dv-tech. See issue #8094
@@ -31,24 +92,74 @@ jobs:
                 #      experimental: true
                 #      status: "Experimental"
         continue-on-error: ${{ matrix.experimental }}
+        steps:
+            # Basic setup chores
+            - uses: actions/checkout@v3
+            - name: Set up JDK ${{ matrix.jdk }}
+              uses: actions/setup-java@v3
+              with:
+                  java-version: ${{ matrix.jdk }}
+                  distribution: temurin
+                  cache: maven
+
+            # Get the build output from the unit test job
+            - uses: actions/download-artifact@v3
+              with:
+                  name: java-artifacts
+            - run: |
+                tar -xvf java-builddir.tar
+                tar -xvf java-m2-selection.tar -C /
+
+            # Run integration tests (but not unit tests again)
+            - run: mvn -DskipUnitTests -Dtarget.java.version=${{ matrix.jdk }} verify
+
+            # Wrap up and send to coverage job
+            - run: tar -cvf java-reportdir.tar target/site
+            - uses: actions/upload-artifact@v3
+              with:
+                  name: java-reportdir
+                  path: java-reportdir.tar
+                  retention-days: 3
+
+    coverage-report:
         runs-on: ubuntu-latest
+        needs: integration-test
+        name: Coverage Report Submission
         steps:
-          - uses: actions/checkout@v2
-          - name: Set up JDK ${{ matrix.jdk }}
-            uses: actions/setup-java@v2
+            # Basic setup chores
+          - uses: actions/checkout@v3
+          - uses: actions/setup-java@v3
             with:
-                java-version: ${{ matrix.jdk }}
-                distribution: 'adopt'
-          - name: Cache Maven packages
-            uses: actions/cache@v2
+                java-version: '17'
+                distribution: temurin
+                cache: maven
+
+          # Get the build output from the integration test job
+          - uses: actions/download-artifact@v3
             with:
-                path: ~/.m2
-                key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
-                restore-keys: ${{ runner.os }}-m2
-          - name: Build with Maven
-            run: mvn -DcompilerArgument=-Xlint:unchecked -Dtarget.java.version=${{ matrix.jdk }} -P all-unit-tests clean test
-          - name: Maven Code Coverage
+                name: java-reportdir
+          - run: tar -xvf java-reportdir.tar
+
+          # Deposit Code Coverage
+          - name: Deposit Code Coverage
             env:
                 CI_NAME: github
                 COVERALLS_SECRET: ${{ secrets.GITHUB_TOKEN }}
-            run: mvn -V -B jacoco:report coveralls:report -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }}
\ No newline at end of file
+            # The coverage commit is sometimes flaky. Don't bail out just because this optional step failed.
+            continue-on-error: true
+            run: >
+                mvn -B
+                -DrepoToken=${COVERALLS_SECRET} -DpullRequest=${{ github.event.number }}
+                jacoco:report coveralls:report
+
+          # NOTE: this may be extended by adding a report to the build output, leaving a comment, sending to Sonarcloud, ...
+
+    push-app-img:
+        name: Publish App Image
+        permissions:
+            contents: read
+            packages: write
+            pull-requests: write
+        needs: integration-test
+        uses: ./.github/workflows/container_app_push.yml
+        secrets: inherit
diff --git a/.github/workflows/pr_comment_commands.yml b/.github/workflows/pr_comment_commands.yml
new file mode 100644
index 00000000000..5ff75def623
--- /dev/null
+++ b/.github/workflows/pr_comment_commands.yml
@@ -0,0 +1,20 @@
+name: PR Comment Commands
+on:
+    issue_comment:
+        types: [created]
+jobs:
+    dispatch:
+        # Only run in the upstream repo - avoid unnecessary runs in forks
+        if: ${{ github.repository_owner == 'IQSS' }}
+        runs-on: ubuntu-latest
+        steps:
+            - name: Dispatch
+              uses: peter-evans/slash-command-dispatch@v3
+              with:
+                  # This token belongs to @dataversebot and has sufficient scope.
+                  token: ${{ secrets.GHCR_TOKEN }}
+                  commands: |
+                      push-image
+                  repository: IQSS/dataverse
+                  # Commenter must have at least write permission to repo to trigger dispatch
+                  permission: write
diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml
index 2d910f54127..56f7d648dc4 100644
--- a/.github/workflows/shellcheck.yml
+++ b/.github/workflows/shellcheck.yml
@@ -1,19 +1,27 @@
 name: "Shellcheck"
 on:
     push:
+        branches:
+            - develop
         paths:
-            - conf/solr/**
-            - modules/container-base/**
+            - conf/solr/**/*.sh
+            - modules/container-base/**/*.sh
+            - modules/container-configbaker/**/*.sh
     pull_request:
+        branches:
+            - develop
         paths:
-            - conf/solr/**
-            - modules/container-base/**
+            - conf/solr/**/*.sh
+            - modules/container-base/**/*.sh
+            - modules/container-configbaker/**/*.sh
 jobs:
     shellcheck:
         name: Shellcheck
         runs-on: ubuntu-latest
+        permissions:
+            pull-requests: write
         steps:
-            - uses: actions/checkout@v2
+            - uses: actions/checkout@v3
             - name: shellcheck
               uses: reviewdog/action-shellcheck@v1
               with:
@@ -21,4 +29,17 @@ jobs:
                   reporter: github-pr-review # Change reporter.
                   fail_on_error: true
                   # Container base image uses dumb-init shebang, so nail to using bash
-                  shellcheck_flags: "--shell=bash --external-sources"
\ No newline at end of file
+                  shellcheck_flags: "--shell=bash --external-sources"
+                  # Exclude old scripts
+                  exclude: |
+                      */.git/*
+                      doc/*
+                      downloads/*
+                      scripts/database/*
+                      scripts/globalid/*
+                      scripts/icons/*
+                      scripts/installer/*
+                      scripts/issues/*
+                      scripts/r/*
+                      scripts/tests/*
+                      tests/*
diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml
index 5c251cfc897..227a74fa00f 100644
--- a/.github/workflows/shellspec.yml
+++ b/.github/workflows/shellspec.yml
@@ -60,7 +60,7 @@ jobs:
                   shellspec
     shellspec-macos:
         name: "MacOS"
-        runs-on: macos-10.15
+        runs-on: macos-latest
         steps:
             - name: Install shellspec
               run: curl -fsSL https://git.io/shellspec | sh -s 0.28.1 --yes
diff --git a/.github/workflows/spi_release.yml b/.github/workflows/spi_release.yml
new file mode 100644
index 00000000000..8ad74b3e4bb
--- /dev/null
+++ b/.github/workflows/spi_release.yml
@@ -0,0 +1,94 @@
+name: Dataverse SPI
+
+on:
+    push:
+        branches:
+            - "develop"
+        paths:
+            - "modules/dataverse-spi/**"
+    pull_request:
+        branches:
+            - "develop"
+        paths:
+            - "modules/dataverse-spi/**"
+
+jobs:
+    # Note: Pushing packages to Maven Central requires access to secrets, which pull requests from remote forks
+    #       don't have. Skip in these cases.
+    check-secrets:
+        name: Check for Secrets Availability
+        runs-on: ubuntu-latest
+        outputs:
+            available: ${{ steps.secret-check.outputs.available }}
+        steps:
+            -   id: secret-check
+                # perform secret check & put boolean result as an output
+                shell: bash
+                run: |
+                    if [ "${{ secrets.DATAVERSEBOT_SONATYPE_USERNAME }}" != '' ]; then
+                        echo "available=true" >> $GITHUB_OUTPUT;
+                    else
+                        echo "available=false" >> $GITHUB_OUTPUT;
+                    fi
+
+    snapshot:
+        name: Release Snapshot
+        needs: check-secrets
+        runs-on: ubuntu-latest
+        if: github.event_name == 'pull_request' && needs.check-secrets.outputs.available == 'true'
+        steps:
+            - uses: actions/checkout@v3
+            - uses: actions/setup-java@v3
+              with:
+                  java-version: '17'
+                  distribution: 'adopt'
+                  server-id: ossrh
+                  server-username: MAVEN_USERNAME
+                  server-password: MAVEN_PASSWORD
+            - uses: actions/cache@v2
+              with:
+                  path: ~/.m2
+                  key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+                  restore-keys: ${{ runner.os }}-m2
+
+            - name: Deploy Snapshot
+              run: mvn -f modules/dataverse-spi -Dproject.version.suffix="-PR${{ github.event.number }}-SNAPSHOT" deploy
+              env:
+                  MAVEN_USERNAME: ${{ secrets.DATAVERSEBOT_SONATYPE_USERNAME }}
+                  MAVEN_PASSWORD: ${{ secrets.DATAVERSEBOT_SONATYPE_TOKEN }}
+
+    release:
+        name: Release
+        needs: check-secrets
+        runs-on: ubuntu-latest
+        if: github.event_name == 'push' && needs.check-secrets.outputs.available == 'true'
+        steps:
+            -   uses: actions/checkout@v3
+            -   uses: actions/setup-java@v3
+                with:
+                    java-version: '17'
+                    distribution: 'adopt'
+            -   uses: actions/cache@v2
+                with:
+                    path: ~/.m2
+                    key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+                    restore-keys: ${{ runner.os }}-m2
+
+            # Running setup-java again overwrites the settings.xml - IT'S MANDATORY TO DO THIS SECOND SETUP!!!
+            -   name: Set up Maven Central Repository
+                uses: actions/setup-java@v3
+                with:
+                    java-version: '17'
+                    distribution: 'adopt'
+                    server-id: ossrh
+                    server-username: MAVEN_USERNAME
+                    server-password: MAVEN_PASSWORD
+                    gpg-private-key: ${{ secrets.DATAVERSEBOT_GPG_KEY }}
+                    gpg-passphrase: MAVEN_GPG_PASSPHRASE
+
+            -   name: Sign + Publish Release
+                run: mvn -f modules/dataverse-spi -P release deploy
+                env:
+                    MAVEN_USERNAME: ${{ secrets.DATAVERSEBOT_SONATYPE_USERNAME }}
+                    MAVEN_PASSWORD: ${{ secrets.DATAVERSEBOT_SONATYPE_TOKEN }}
+                    MAVEN_GPG_PASSPHRASE: ${{ secrets.DATAVERSEBOT_GPG_PASSWORD }}
diff --git a/.gitignore b/.gitignore
index 83671abf43e..7f0d3a2b466 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,7 +18,6 @@ GRTAGS
 .Trashes
 ehthumbs.db
 Thumbs.db
-.vagrant
 *.pyc
 *.swp
 scripts/api/py_api_wrapper/demo-data/*
@@ -39,17 +38,6 @@ scripts/api/setup-all.*.log
 # ctags generated tag file
 tags
 
-# dependencies I'm not sure we're allowed to redistribute / have in version control
-conf/docker-aio/dv/deps/
-
-# no need to check aoi installer zip into vc
-conf/docker-aio/dv/install/dvinstall.zip
-# or copy of test data
-conf/docker-aio/testdata/
-
-# docker-aio creates maven/ which reports 86 new files. ignore this wd.
-maven/
-
 scripts/installer/default.config
 *.pem
 
@@ -71,5 +59,5 @@ scripts/search/data/binary/trees.png.thumb140
 src/main/webapp/resources/images/cc0.png.thumb140
 src/main/webapp/resources/images/dataverseproject.png.thumb140
 
-# apache-maven is downloaded by docker-aio
-apache-maven*
+# Docker development volumes
+/docker-dev-volumes
diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 00000000000..cadaedc1448
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,21 @@
+version: 2
+
+# HTML is always built; these are additional formats only
+formats:
+    - pdf
+
+build:
+    os: ubuntu-22.04
+    tools:
+        python: "3.10"
+    apt_packages:
+        - graphviz
+
+python:
+    install:
+        - requirements: doc/sphinx-guides/requirements.txt
+
+
+sphinx:
+    configuration: doc/sphinx-guides/source/conf.py
+    fail_on_warning: true
diff --git a/README.md b/README.md
index d40e5f228f7..831dbfed5ff 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 Dataverse®  
 ===============
 
-Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Data Science and Products team](http://www.iq.harvard.edu/people/people/data-science-products) at the [Institute for Quantitative Social Science](http://iq.harvard.edu/) and the [Dataverse community][]).
+Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]).
 
 [dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our development [roadmap][], and more.
 
@@ -15,7 +15,7 @@ We love contributors! Please see our [Contributing Guide][] for ways you can hel
 
 Dataverse is a trademark of President and Fellows of Harvard College and is registered in the United States.
 
-[![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg?raw=true "Dataverse Project")](http://dataverse.org)
+[![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg "Dataverse Project")](http://dataverse.org)
 
 [![API Test Status](https://jenkins.dataverse.org/buildStatus/icon?job=IQSS-dataverse-develop&subject=API%20Test%20Status)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/)
 [![API Test Coverage](https://img.shields.io/jenkins/coverage/jacoco?jobUrl=https%3A%2F%2Fjenkins.dataverse.org%2Fjob%2FIQSS-dataverse-develop&label=API%20Test%20Coverage)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/ws/target/coverage-it/index.html)
@@ -26,15 +26,15 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi
 [dataverse.org]: https://dataverse.org
 [demo.dataverse.org]: https://demo.dataverse.org
 [Dataverse community]: https://dataverse.org/developers
-[Installation Guide]: http://guides.dataverse.org/en/latest/installation/index.html
+[Installation Guide]: https://guides.dataverse.org/en/latest/installation/index.html
 [latest release]: https://github.com/IQSS/dataverse/releases
 [features]: https://dataverse.org/software-features
 [roadmap]: https://www.iq.harvard.edu/roadmap-dataverse-project
 [integrations]: https://dataverse.org/integrations
-[REST APIs]: http://guides.dataverse.org/en/latest/api/index.html
+[REST APIs]: https://guides.dataverse.org/en/latest/api/index.html
 [Contributing Guide]: CONTRIBUTING.md
 [mailing list]: https://groups.google.com/group/dataverse-community
 [community call]: https://dataverse.org/community-calls
-[chat.dataverse.org]: http://chat.dataverse.org
+[chat.dataverse.org]: https://chat.dataverse.org
 [Dataverse Community Meeting]: https://dataverse.org/events
 [open source]: LICENSE.md
diff --git a/Vagrantfile b/Vagrantfile
deleted file mode 100644
index 8293fbaf5fc..00000000000
--- a/Vagrantfile
+++ /dev/null
@@ -1,27 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-VAGRANTFILE_API_VERSION = "2"
-
-Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
-  config.vm.box = "bento/rockylinux-8.4"
-
-  config.vm.provider "virtualbox" do |vbox|
-    vbox.cpus = 4
-    vbox.memory = 4096
-  end
-
-  config.vm.provision "shell", path: "scripts/vagrant/setup.sh"
-  config.vm.provision "shell", path: "scripts/vagrant/setup-solr.sh"
-  config.vm.provision "shell", path: "scripts/vagrant/install-dataverse.sh"
-
-  config.vm.network "private_network", type: "dhcp"
-  config.vm.network "forwarded_port", guest: 80, host: 8888
-  config.vm.network "forwarded_port", guest: 443, host: 9999
-  config.vm.network "forwarded_port", guest: 8983, host: 8993
-  config.vm.network "forwarded_port", guest: 8080, host: 8088
-  config.vm.network "forwarded_port", guest: 8181, host: 8188
-
-  config.vm.synced_folder ".", "/dataverse"
-
-end
diff --git a/conf/docker-aio/0prep_deps.sh b/conf/docker-aio/0prep_deps.sh
deleted file mode 100755
index 13a91705303..00000000000
--- a/conf/docker-aio/0prep_deps.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-if [ ! -d dv/deps ]; then
-	mkdir -p dv/deps
-fi
-wdir=`pwd`
-
-if [ ! -e dv/deps/payara-5.2022.3.zip ]; then
-	echo "payara dependency prep"
-	wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip  -O dv/deps/payara-5.2022.3.zip
-fi
-
-if [ ! -e dv/deps/solr-8.11.1dv.tgz ]; then
-	echo "solr dependency prep"
-	# schema changes *should* be the only ones...
-	cd dv/deps/	
-	wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz -O solr-8.11.1dv.tgz
-	cd ../../
-fi
-
diff --git a/conf/docker-aio/1prep.sh b/conf/docker-aio/1prep.sh
deleted file mode 100755
index 508d41d93ff..00000000000
--- a/conf/docker-aio/1prep.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-# move things necessary for integration tests into build context.
-# this was based off the phoenix deployment; and is likely uglier and bulkier than necessary in a perfect world
-
-mkdir -p testdata/doc/sphinx-guides/source/_static/util/
-cp ../solr/8.11.1/schema*.xml testdata/
-cp ../solr/8.11.1/solrconfig.xml testdata/
-cp ../jhove/jhove.conf testdata/
-cp ../jhove/jhoveConfig.xsd testdata/
-cd ../../
-cp -r scripts conf/docker-aio/testdata/
-cp doc/sphinx-guides/source/_static/util/createsequence.sql conf/docker-aio/testdata/doc/sphinx-guides/source/_static/util/
-
-wget -q https://downloads.apache.org/maven/maven-3/3.8.5/binaries/apache-maven-3.8.5-bin.tar.gz
-tar xfz apache-maven-3.8.5-bin.tar.gz
-mkdir maven
-mv apache-maven-3.8.5/* maven/
-echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > maven/maven.sh
-echo "export M2_HOME=../maven" >> maven/maven.sh
-echo "export MAVEN_HOME=../maven" >> maven/maven.sh
-echo "export PATH=../maven/bin:${PATH}" >> maven/maven.sh
-chmod 0755 maven/maven.sh
-
-# not using dvinstall.zip for setupIT.bash; but still used in install.bash for normal ops
-source maven/maven.sh && mvn clean
-./scripts/installer/custom-build-number
-source maven/maven.sh && mvn package
-cd scripts/installer
-make clean
-make
-mkdir -p ../../conf/docker-aio/dv/install
-cp dvinstall.zip ../../conf/docker-aio/dv/install/
-
-# ITs sometimes need files server-side
-# yes, these copies could be avoided by moving the build root here. but the build 
-#  context is already big enough that it seems worth avoiding.
-cd ../../
-cp src/test/java/edu/harvard/iq/dataverse/makedatacount/sushi_sample_logs.json conf/docker-aio/testdata/
diff --git a/conf/docker-aio/c8.dockerfile b/conf/docker-aio/c8.dockerfile
deleted file mode 100644
index 0002464cbf2..00000000000
--- a/conf/docker-aio/c8.dockerfile
+++ /dev/null
@@ -1,87 +0,0 @@
-FROM rockylinux/rockylinux:latest
-# OS dependencies
-# IQSS now recommends Postgres 13.
-RUN dnf -qy module disable postgresql
-RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm
-
-RUN echo "fastestmirror=true" >> /etc/dnf/dnf.conf
-RUN yum install -y java-11-openjdk-devel postgresql13-server sudo epel-release unzip curl httpd python2 diffutils
-RUN yum install -y jq lsof awscli
-
-# for older search scripts
-RUN ln -s /usr/bin/python2 /usr/bin/python
-
-# copy and unpack dependencies (solr, payara)
-COPY dv /tmp/dv
-COPY testdata/schema*.xml /tmp/dv/
-COPY testdata/solrconfig.xml /tmp/dv
-
-# ITs need files
-COPY testdata/sushi_sample_logs.json /tmp/
-
-# IPv6 and localhost appears to be related to some of the intermittant connection issues
-COPY disableipv6.conf /etc/sysctl.d/
-RUN rm /etc/httpd/conf/*
-COPY httpd.conf /etc/httpd/conf 
-RUN cd /opt ; tar zxf /tmp/dv/deps/solr-8.11.1dv.tgz
-RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2022.3.zip ; ln -s /opt/payara5 /opt/glassfish4
-
-# this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitir endpoint for the jvm`
-# this dies under Java 11, do we keep it?
-#COPY domain-restmonitor.xml /opt/payara5/glassfish/domains/domain1/config/domain.xml
-
-RUN sudo -u postgres /usr/pgsql-13/bin/initdb -D /var/lib/pgsql/13/data -E 'UTF-8'
-
-# copy configuration related files
-RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/13/data/
-RUN cp -r /opt/solr-8.11.1/server/solr/configsets/_default /opt/solr-8.11.1/server/solr/collection1
-RUN cp /tmp/dv/schema*.xml /opt/solr-8.11.1/server/solr/collection1/conf/
-RUN cp /tmp/dv/solrconfig.xml /opt/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml
-
-# skipping payara user and solr user (run both as root)
-
-#solr port
-EXPOSE 8983
-
-# postgres port
-EXPOSE 5432
-
-# payara port
-EXPOSE 8080
-
-# apache port, http
-EXPOSE 80
-
-# debugger ports (jmx,jdb)
-EXPOSE 8686
-EXPOSE 9009
-
-RUN mkdir /opt/dv
-
-# keeping the symlink on the off chance that something else is still assuming /usr/local/glassfish4
-RUN ln -s /opt/payara5 /usr/local/glassfish4
-COPY dv/install/ /opt/dv/
-COPY install.bash /opt/dv/
-COPY entrypoint.bash /opt/dv/
-COPY testdata /opt/dv/testdata
-COPY testscripts/* /opt/dv/testdata/
-COPY setupIT.bash /opt/dv
-WORKDIR /opt/dv
-
-# need to take DOI provider info from build args as of ec377d2a4e27424db8815c55ce544deee48fc5e0
-# Default to EZID; use built-args to switch to DataCite (or potentially handles)
-#ARG DoiProvider=EZID
-ARG DoiProvider=FAKE
-ARG doi_baseurl=https://ezid.cdlib.org
-ARG doi_username=apitest
-ARG doi_password=apitest
-ENV DoiProvider=${DoiProvider}
-ENV doi_baseurl=${doi_baseurl}
-ENV doi_username=${doi_username}
-ENV doi_password=${doi_password}
-COPY configure_doi.bash /opt/dv
-
-# healthcheck for payara only (assumes modified domain.xml);
-#  does not check dataverse application status.
-HEALTHCHECK CMD curl --fail http://localhost:4848/monitoring/domain/server.json || exit 1
-CMD ["/opt/dv/entrypoint.bash"]
diff --git a/conf/docker-aio/configure_doi.bash b/conf/docker-aio/configure_doi.bash
deleted file mode 100755
index f0f0bc6d0d4..00000000000
--- a/conf/docker-aio/configure_doi.bash
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env bash
-
-cd /opt/payara5
-
-# if appropriate; reconfigure PID provider on the basis of environmental variables.
-if [ ! -z "${DoiProvider}" ]; then
-        curl -X PUT -d ${DoiProvider} http://localhost:8080/api/admin/settings/:DoiProvider
-fi
-if [ ! -z "${doi_username}" ]; then
-        bin/asadmin create-jvm-options "-Ddoi.username=${doi_username}"
-fi
-if [ ! -z "${doi_password}" ]; then
-        bin/asadmin create-jvm-options "-Ddoi.password=${doi_password}"
-fi
-if [ ! -z "${doi_baseurl}" ]; then
-        bin/asadmin delete-jvm-options "-Ddoi.baseurlstring=https\://mds.test.datacite.org"
-        doi_baseurl_esc=`echo ${doi_baseurl} | sed -e 's/:/\\\:/'`
-        bin/asadmin create-jvm-options "-Ddoi.baseurlstring=${doi_baseurl_esc}"
-fi
-if [ ! -z "${doi_dataciterestapiurl}" ]; then
-        bin/asadmin delete-jvm-options "-Ddoi.dataciterestapiurlstring=https\://api.test.datacite.org"
-        doi_dataciterestapiurl_esc=`echo ${doi_dataciterestapiurl} | sed -e 's/:/\\\:/'`
-        bin/asadmin create-jvm-options "-Ddoi.dataciterestapiurlstring=${doi_dataciterestapiurl_esc}"
-fi
diff --git a/conf/docker-aio/disableipv6.conf b/conf/docker-aio/disableipv6.conf
deleted file mode 100644
index 8d425183e3f..00000000000
--- a/conf/docker-aio/disableipv6.conf
+++ /dev/null
@@ -1 +0,0 @@
-net.ipv6.conf.all.disable_ipv6 = 1
diff --git a/conf/docker-aio/domain-restmonitor.xml b/conf/docker-aio/domain-restmonitor.xml
deleted file mode 100644
index a18a88ab011..00000000000
--- a/conf/docker-aio/domain-restmonitor.xml
+++ /dev/null
@@ -1,486 +0,0 @@
-<!--
-
-    DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
-
-    Copyright (c) 2012-2014 Oracle and/or its affiliates. All rights reserved.
-
-    The contents of this file are subject to the terms of either the GNU
-    General Public License Version 2 only ("GPL") or the Common Development
-    and Distribution License("CDDL") (collectively, the "License").  You
-    may not use this file except in compliance with the License.  You can
-    obtain a copy of the License at
-    https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
-    or packager/legal/LICENSE.txt.  See the License for the specific
-    language governing permissions and limitations under the License.
-
-    When distributing the software, include this License Header Notice in each
-    file and include the License file at packager/legal/LICENSE.txt.
-
-    GPL Classpath Exception:
-    Oracle designates this particular file as subject to the "Classpath"
-    exception as provided by Oracle in the GPL Version 2 section of the License
-    file that accompanied this code.
-
-    Modifications:
-    If applicable, add the following below the License Header, with the fields
-    enclosed by brackets [] replaced by your own identifying information:
-    "Portions Copyright [year] [name of copyright owner]"
-
-    Contributor(s):
-    If you wish your version of this file to be governed by only the CDDL or
-    only the GPL Version 2, indicate your decision by adding "[Contributor]
-    elects to include this software in this distribution under the [CDDL or GPL
-    Version 2] license."  If you don't indicate a single choice of license, a
-    recipient has the option to distribute your version of this file under
-    either the CDDL, the GPL Version 2 or to extend the choice of license to
-    its licensees as provided above.  However, if you add GPL Version 2 code
-    and therefore, elected the GPL Version 2 license, then the option applies
-    only if the new code is made subject to such option by the copyright
-    holder.
-
--->
-
-<domain log-root="${com.sun.aas.instanceRoot}/logs" application-root="${com.sun.aas.instanceRoot}/applications" version="10.0">
-<security-configurations>
-    <authentication-service default="true" name="adminAuth" use-password-credential="true">
-      <security-provider name="spcrealm" type="LoginModule" provider-name="adminSpc">
-        <login-module-config name="adminSpecialLM" control-flag="sufficient" module-class="com.sun.enterprise.admin.util.AdminLoginModule">
-          <property name="config" value="server-config"></property>
-          <property name="auth-realm" value="admin-realm"></property>
-        </login-module-config>
-      </security-provider>
-      <security-provider name="filerealm" type="LoginModule" provider-name="adminFile">
-        <login-module-config name="adminFileLM" control-flag="sufficient" module-class="com.sun.enterprise.security.auth.login.FileLoginModule">
-          <property name="config" value="server-config"></property>
-          <property name="auth-realm" value="admin-realm"></property>
-        </login-module-config>
-      </security-provider>
-    </authentication-service>
-    <authorization-service default="true" name="authorizationService">
-      <security-provider name="simpleAuthorization" type="Simple" provider-name="simpleAuthorizationProvider">
-        <authorization-provider-config support-policy-deploy="false" name="simpleAuthorizationProviderConfig"></authorization-provider-config>
-      </security-provider>
-    </authorization-service>
-  </security-configurations>
-  <system-applications />
-  <resources>
-    <jdbc-resource pool-name="__TimerPool" jndi-name="jdbc/__TimerPool" object-type="system-admin" />
-    <jdbc-resource pool-name="DerbyPool" jndi-name="jdbc/__default" object-type="system-all" />
-    <jdbc-connection-pool name="__TimerPool" datasource-classname="org.apache.derby.jdbc.EmbeddedXADataSource" res-type="javax.sql.XADataSource">
-      <property value="${com.sun.aas.instanceRoot}/lib/databases/ejbtimer" name="databaseName" />
-      <property value=";create=true" name="connectionAttributes" />
-    </jdbc-connection-pool>
-    <jdbc-connection-pool is-isolation-level-guaranteed="false" name="DerbyPool" datasource-classname="org.apache.derby.jdbc.ClientDataSource" res-type="javax.sql.DataSource">
-      <property value="1527" name="PortNumber" />
-      <property value="APP" name="Password" />
-      <property value="APP" name="User" />
-      <property value="localhost" name="serverName" />
-      <property value="sun-appserv-samples" name="DatabaseName" />
-      <property value=";create=true" name="connectionAttributes" />
-    </jdbc-connection-pool>
-  </resources>
-  <servers>
-    <server name="server" config-ref="server-config">
-      <resource-ref ref="jdbc/__TimerPool" />
-      <resource-ref ref="jdbc/__default" />
-    </server>
-  </servers>
-  <nodes>
-    <node name="localhost-domain1" type="CONFIG" node-host="localhost" install-dir="${com.sun.aas.productRoot}"/>
-  </nodes>
- <configs>
-   <config name="server-config">
-      <system-property name="JMS_PROVIDER_PORT" value="7676" description="Port Number that JMS Service will listen for remote clients connection." />
-      
-      <http-service>
-        <access-log/>
-        <virtual-server id="server" network-listeners="http-listener-1,http-listener-2"/>
-        <virtual-server id="__asadmin" network-listeners="admin-listener"/>
-      </http-service>
-      <iiop-service>
-        <orb use-thread-pool-ids="thread-pool-1" />
-        <iiop-listener address="0.0.0.0" port="3700" id="orb-listener-1" lazy-init="true"/>
-        <iiop-listener security-enabled="true" address="0.0.0.0" port="3820" id="SSL">
-          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" />
-        </iiop-listener>
-        <iiop-listener security-enabled="true" address="0.0.0.0" port="3920" id="SSL_MUTUALAUTH">
-          <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true" />
-        </iiop-listener>
-      </iiop-service>
-      <admin-service auth-realm-name="admin-realm" type="das-and-server" system-jmx-connector-name="system">
-        <jmx-connector auth-realm-name="admin-realm" security-enabled="false" address="0.0.0.0" port="8686" name="system" />
-        <property value="/admin" name="adminConsoleContextRoot" />
-        <property value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war" name="adminConsoleDownloadLocation" />
-        <property value="${com.sun.aas.installRoot}/.." name="ipsRoot" />
-      </admin-service>
-      <connector-service shutdown-timeout-in-seconds="30">
-      </connector-service>
-       <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs" />
-       <diagnostic-service />
-      <security-service>
-        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
-          <property value="${com.sun.aas.instanceRoot}/config/admin-keyfile" name="file" />
-          <property value="fileRealm" name="jaas-context" />
-        </auth-realm>
-        <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
-          <property value="${com.sun.aas.instanceRoot}/config/keyfile" name="file" />
-          <property value="fileRealm" name="jaas-context" />
-        </auth-realm>
-        <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate" />
-        <jacc-provider policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl" policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default">
-          <property value="${com.sun.aas.instanceRoot}/generated/policy" name="repository" />
-        </jacc-provider>
-        <jacc-provider policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory" policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" />
-        <audit-module classname="com.sun.enterprise.security.ee.Audit" name="default">
-          <property value="false" name="auditOn" />
-        </audit-module>
-        <message-security-config auth-layer="SOAP">
-          <provider-config provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule" provider-type="client">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="dynamic.username.password" />
-            <property value="false" name="debug" />
-          </provider-config>
-          <provider-config provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule" provider-type="client">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="dynamic.username.password" />
-            <property value="false" name="debug" />
-            <property value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" name="security.config" />
-          </provider-config>
-          <provider-config provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule" provider-type="server">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="debug" />
-          </provider-config>
-          <provider-config provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule" provider-type="server">
-            <request-policy auth-source="content" />
-            <response-policy auth-source="content" />
-            <property value="s1as" name="encryption.key.alias" />
-            <property value="s1as" name="signature.key.alias" />
-            <property value="false" name="debug" />
-            <property value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" name="security.config" />
-          </provider-config>
-        </message-security-config>
-        <message-security-config auth-layer="HttpServlet">
-            <provider-config provider-type="server" provider-id="GFConsoleAuthModule" class-name="org.glassfish.admingui.common.security.AdminConsoleAuthModule">
-                <request-policy auth-source="sender"></request-policy>
-                <response-policy></response-policy>
-                <property name="loginPage" value="/login.jsf"></property>
-                <property name="loginErrorPage" value="/loginError.jsf"></property>
-            </provider-config>
-        </message-security-config>
-	<property value="SHA-256" name="default-digest-algorithm" />
-      </security-service>
-      <java-config classpath-suffix="" system-classpath="" debug-options="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=9009">
-        <jvm-options>-XX:MaxPermSize=192m</jvm-options>
-        <jvm-options>-client</jvm-options>
-        <jvm-options>-Djava.awt.headless=true</jvm-options>
-        <jvm-options>-Djdk.corba.allowOutputStreamSubclass=true</jvm-options>
-        <jvm-options>-Djavax.xml.accessExternalSchema=all</jvm-options>
-        <jvm-options>-Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder</jvm-options>
-        <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
-        <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
-        <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
-        <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
-        <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
-        <jvm-options>-Xmx512m</jvm-options>
-        <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
-        <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
-        <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext</jvm-options>
-        <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
-		<jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
-        <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
-        <!-- Configure the post-startup bundle list here. This is a comma-separated list of bundle symbolic names. -->
-        <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.shell.remote,org.apache.felix.fileinstall</jvm-options>
-        <!-- Configuration of various third-party OSGi bundles like
-             Felix Remote Shell, FileInstall, etc. -->
-        <!-- Port on which remote shell listens for connections.-->
-        <jvm-options>-Dosgi.shell.telnet.port=6666</jvm-options>
-        <!-- How many concurrent users can connect to this remote shell -->
-        <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
-        <!-- From which hosts users can connect -->
-        <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
-        <!-- Gogo shell configuration -->
-        <jvm-options>-Dgosh.args=--nointeractive</jvm-options>
-        <!-- Directory being watched by fileinstall. -->
-        <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
-        <!-- Polling period of the fileinstall thread, in ms. -->
-        <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
-        <!-- log level: 1 for error, 2 for warning, 3 for info and 4 for debug. -->
-        <jvm-options>-Dfelix.fileinstall.log.level=2</jvm-options>
-        <!-- should new bundles be started or installed only? 
-             true => start, false => only install 
-        -->
-        <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
-        <!-- should watched bundles be started transiently or persistently -->
-        <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
-        <!-- Should changes to configuration be saved in corresponding cfg file? false: no, true: yes
-             If we don't set this to false, every time the server starts from a clean OSGi cache, the file gets rewritten.
-        -->
-        <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
-        <!-- End of OSGi bundle configurations -->
-        <jvm-options>-XX:NewRatio=2</jvm-options>
-        <!-- Woodstox property needed to pass StAX TCK -->
-        <jvm-options>-Dcom.ctc.wstx.returnNullForDefaultNamespace=true</jvm-options>
-      </java-config>
-      <network-config>
-        <protocols>
-          <protocol name="http-listener-1">
-            <http default-virtual-server="server" max-connections="250">
-              <file-cache enabled="false"></file-cache>
-            </http>
-          </protocol>
-          <protocol security-enabled="true" name="http-listener-2">
-            <http default-virtual-server="server" max-connections="250">
-              <file-cache enabled="false"></file-cache>
-            </http>
-            <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as"></ssl>
-          </protocol>
-          <protocol name="admin-listener">
-            <http default-virtual-server="__asadmin" max-connections="250" encoded-slash-enabled="true" >
-              <file-cache enabled="false"></file-cache>
-            </http>
-          </protocol>
-        </protocols>
-        <network-listeners>
-          <network-listener port="8080" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool"></network-listener>
-          <network-listener port="8181" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool"></network-listener>
-          <network-listener port="4848" protocol="admin-listener" transport="tcp" name="admin-listener" thread-pool="admin-thread-pool"></network-listener>
-        </network-listeners>
-        <transports>
-          <transport name="tcp"></transport>
-        </transports>
-      </network-config>
-      <thread-pools>
-          <thread-pool name="admin-thread-pool" max-thread-pool-size="50" max-queue-size="256"></thread-pool>
-          <thread-pool name="http-thread-pool" max-queue-size="4096"></thread-pool>
-          <thread-pool name="thread-pool-1" max-thread-pool-size="200"/>
-  </thread-pools>
-  <!-- try to enable REST monitoring by editing config -->
-      <monitoring-service>
-        <module-monitoring-levels jvm="LOW"></module-monitoring-levels>
-      </monitoring-service>
-    </config>
-     <config name="default-config" dynamic-reconfiguration-enabled="true" >
-         <http-service>
-             <access-log/>
-             <virtual-server id="server" network-listeners="http-listener-1, http-listener-2" >
-                 <property name="default-web-xml" value="${com.sun.aas.instanceRoot}/config/default-web.xml"/>
-             </virtual-server>
-             <virtual-server id="__asadmin" network-listeners="admin-listener" />
-         </http-service>
-         <iiop-service>
-             <orb use-thread-pool-ids="thread-pool-1" />
-             <iiop-listener port="${IIOP_LISTENER_PORT}" id="orb-listener-1" address="0.0.0.0" />
-             <iiop-listener port="${IIOP_SSL_LISTENER_PORT}" id="SSL" address="0.0.0.0" security-enabled="true">
-                 <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" />
-             </iiop-listener>
-             <iiop-listener port="${IIOP_SSL_MUTUALAUTH_PORT}" id="SSL_MUTUALAUTH" address="0.0.0.0" security-enabled="true">
-                 <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="s1as" client-auth-enabled="true" />
-             </iiop-listener>
-         </iiop-service>
-         <admin-service system-jmx-connector-name="system" type="server">
-             <!-- JSR 160  "system-jmx-connector" -->
-             <jmx-connector address="0.0.0.0" auth-realm-name="admin-realm" name="system" port="${JMX_SYSTEM_CONNECTOR_PORT}" protocol="rmi_jrmp" security-enabled="false"/>
-             <!-- JSR 160  "system-jmx-connector" -->
-             <property value="${com.sun.aas.installRoot}/lib/install/applications/admingui.war" name="adminConsoleDownloadLocation" />
-         </admin-service>
-         <web-container>
-             <session-config>
-                 <session-manager>
-                     <manager-properties/>
-                     <store-properties />
-                 </session-manager>
-                 <session-properties />
-             </session-config>
-         </web-container>
-         <ejb-container session-store="${com.sun.aas.instanceRoot}/session-store">
-             <ejb-timer-service />
-         </ejb-container>
-         <mdb-container />
-         <jms-service type="EMBEDDED" default-jms-host="default_JMS_host" addresslist-behavior="priority">
-             <jms-host name="default_JMS_host" host="localhost" port="${JMS_PROVIDER_PORT}" admin-user-name="admin" admin-password="admin" lazy-init="true"/>
-         </jms-service>
-         <log-service log-rotation-limit-in-bytes="2000000" file="${com.sun.aas.instanceRoot}/logs/server.log">
-             <module-log-levels />
-         </log-service>
-         <security-service>
-             <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="admin-realm">
-                 <property name="file" value="${com.sun.aas.instanceRoot}/config/admin-keyfile" />
-                 <property name="jaas-context" value="fileRealm" />
-             </auth-realm>
-             <auth-realm classname="com.sun.enterprise.security.auth.realm.file.FileRealm" name="file">
-                 <property name="file" value="${com.sun.aas.instanceRoot}/config/keyfile" />
-                 <property name="jaas-context" value="fileRealm" />
-             </auth-realm>
-             <auth-realm classname="com.sun.enterprise.security.auth.realm.certificate.CertificateRealm" name="certificate" />
-             <jacc-provider policy-provider="com.sun.enterprise.security.provider.PolicyWrapper" name="default" policy-configuration-factory-provider="com.sun.enterprise.security.provider.PolicyConfigurationFactoryImpl">
-                 <property name="repository" value="${com.sun.aas.instanceRoot}/generated/policy" />
-             </jacc-provider>
-             <jacc-provider policy-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyProvider" name="simple" policy-configuration-factory-provider="com.sun.enterprise.security.jacc.provider.SimplePolicyConfigurationFactory" />
-             <audit-module classname="com.sun.enterprise.security.ee.Audit" name="default">
-                 <property value="false" name="auditOn" />
-             </audit-module>
-             <message-security-config auth-layer="SOAP">
-                 <provider-config provider-type="client" provider-id="XWS_ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="dynamic.username.password" value="false" />
-                     <property name="debug" value="false" />
-                 </provider-config>
-                 <provider-config provider-type="client" provider-id="ClientProvider" class-name="com.sun.xml.wss.provider.ClientSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="dynamic.username.password" value="false" />
-                     <property name="debug" value="false" />
-                     <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" />
-                 </provider-config>
-                 <provider-config provider-type="server" provider-id="XWS_ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="debug" value="false" />
-                 </provider-config>
-                 <provider-config provider-type="server" provider-id="ServerProvider" class-name="com.sun.xml.wss.provider.ServerSecurityAuthModule">
-                     <request-policy auth-source="content" />
-                     <response-policy auth-source="content" />
-                     <property name="encryption.key.alias" value="s1as" />
-                     <property name="signature.key.alias" value="s1as" />
-                     <property name="debug" value="false" />
-                     <property name="security.config" value="${com.sun.aas.instanceRoot}/config/wss-server-config-1.0.xml" />
-                 </provider-config>
-             </message-security-config>
-         </security-service>
-         <transaction-service tx-log-dir="${com.sun.aas.instanceRoot}/logs" automatic-recovery="true" />
-         <diagnostic-service />
-         <java-config debug-options="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=${JAVA_DEBUGGER_PORT}" system-classpath="" classpath-suffix="">
-             <jvm-options>-XX:MaxPermSize=192m</jvm-options>
-             <jvm-options>-server</jvm-options>
-             <jvm-options>-Djava.awt.headless=true</jvm-options>
-             <jvm-options>-Djdk.corba.allowOutputStreamSubclass=true</jvm-options>
-             <jvm-options>-XX:+UnlockDiagnosticVMOptions</jvm-options>
-             <jvm-options>-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed</jvm-options>
-             <jvm-options>-Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy</jvm-options>
-             <jvm-options>-Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf</jvm-options>
-             <jvm-options>-Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as</jvm-options>
-             <jvm-options>-Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks</jvm-options>
-             <jvm-options>-Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks</jvm-options>
-             <jvm-options>-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext</jvm-options>
-             <jvm-options>-Djdbc.drivers=org.apache.derby.jdbc.ClientDriver</jvm-options>
-             <jvm-options>-DANTLR_USE_DIRECT_CLASS_LOADING=true</jvm-options>
-             <jvm-options>-Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory</jvm-options>
-             <jvm-options>-XX:NewRatio=2</jvm-options>
-             <jvm-options>-Xmx512m</jvm-options>
-             <!-- Configure the post-startup bundle list here. This is a comma-separated list of bundle symbolic names.
-                  The remote shell bundle has been disabled for cluster and remote instances. -->
-             <jvm-options>-Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall</jvm-options>
-             <!-- Port on which remote shell listens for connections.-->
-             <jvm-options>-Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT}</jvm-options>
-             <!-- How many concurrent users can connect to this remote shell -->
-             <jvm-options>-Dosgi.shell.telnet.maxconn=1</jvm-options>
-             <!-- From which hosts users can connect -->
-             <jvm-options>-Dosgi.shell.telnet.ip=127.0.0.1</jvm-options>
-             <!-- Gogo shell configuration -->
-             <jvm-options>-Dgosh.args=--noshutdown -c noop=true</jvm-options>
-             <!-- Directory being watched by fileinstall. -->
-             <jvm-options>-Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/</jvm-options>
-             <!-- Polling period of the fileinstall thread, in ms. -->
-             <jvm-options>-Dfelix.fileinstall.poll=5000</jvm-options>
-             <!-- log level: 1 for error, 2 for warning, 3 for info and 4 for debug. -->
-             <jvm-options>-Dfelix.fileinstall.log.level=3</jvm-options>
-             <!-- should new bundles be started or installed only?
-                 true => start, false => only install
-             -->
-             <jvm-options>-Dfelix.fileinstall.bundles.new.start=true</jvm-options>
-             <!-- should watched bundles be started transiently or persistently -->
-             <jvm-options>-Dfelix.fileinstall.bundles.startTransient=true</jvm-options>
-             <!-- Should changes to configuration be saved in corresponding cfg file? false: no, true: yes
-                  If we don't set this to false, every time the server starts from a clean OSGi cache, the file gets rewritten.
-             -->
-             <jvm-options>-Dfelix.fileinstall.disableConfigSave=false</jvm-options>
-             <!-- End of OSGi bundle configurations -->
-        </java-config>
-         <availability-service>
-             <web-container-availability/>
-             <ejb-container-availability sfsb-store-pool-name="jdbc/hastore"/>
-             <jms-availability/>
-         </availability-service>
-         <network-config>
-             <protocols>
-                 <protocol name="http-listener-1">
-                     <http default-virtual-server="server">
-                         <file-cache />
-                     </http>
-                 </protocol>
-                 <protocol security-enabled="true" name="http-listener-2">
-                     <http default-virtual-server="server">
-                         <file-cache />
-                     </http>
-                     <ssl classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" ssl3-enabled="false" cert-nickname="s1as" />
-                 </protocol>
-                 <protocol name="admin-listener">
-                     <http default-virtual-server="__asadmin" max-connections="250">
-                         <file-cache enabled="false" />
-                     </http>
-                 </protocol>
-                 <protocol security-enabled="true" name="sec-admin-listener">
-                   <http default-virtual-server="__asadmin" encoded-slash-enabled="true">
-                     <file-cache></file-cache>
-                   </http>
-                   <ssl client-auth="want" ssl3-enabled="false" classname="com.sun.enterprise.security.ssl.GlassfishSSLImpl" cert-nickname="glassfish-instance" renegotiate-on-client-auth-want="false"></ssl>
-                 </protocol>
-                 <protocol name="admin-http-redirect">
-                   <http-redirect secure="true"></http-redirect>
-                 </protocol>
-                 <protocol name="pu-protocol">
-                   <port-unification>
-                     <protocol-finder protocol="sec-admin-listener" name="http-finder" classname="org.glassfish.grizzly.config.portunif.HttpProtocolFinder"></protocol-finder>
-                     <protocol-finder protocol="admin-http-redirect" name="admin-http-redirect" classname="org.glassfish.grizzly.config.portunif.HttpProtocolFinder"></protocol-finder>
-                   </port-unification>
-                 </protocol>
-
-             </protocols>
-             <network-listeners>
-                 <network-listener address="0.0.0.0" port="${HTTP_LISTENER_PORT}" protocol="http-listener-1" transport="tcp" name="http-listener-1" thread-pool="http-thread-pool" />
-                 <network-listener address="0.0.0.0" port="${HTTP_SSL_LISTENER_PORT}" protocol="http-listener-2" transport="tcp" name="http-listener-2" thread-pool="http-thread-pool" />
-                 <network-listener port="${ASADMIN_LISTENER_PORT}" protocol="pu-protocol" transport="tcp" name="admin-listener" thread-pool="http-thread-pool" />
-             </network-listeners>
-             <transports>
-                 <transport name="tcp" />
-             </transports>
-         </network-config>
-         <thread-pools>
-             <thread-pool name="http-thread-pool" />
-             <thread-pool max-thread-pool-size="200" idle-thread-timeout-in-seconds="120" name="thread-pool-1" />
-         </thread-pools>
-         <group-management-service/>
-         <system-property name="JMS_PROVIDER_PORT" value="27676" description="Port Number that JMS Service will listen for remote clients connection." />
-         <system-property name="ASADMIN_LISTENER_PORT" value="24848"/>
-         <system-property name="HTTP_LISTENER_PORT" value="28080"/>
-         <system-property name="HTTP_SSL_LISTENER_PORT" value="28181"/>
-         <system-property name="IIOP_LISTENER_PORT" value="23700"/>
-         <system-property name="IIOP_SSL_LISTENER_PORT" value="23820"/>
-         <system-property name="IIOP_SSL_MUTUALAUTH_PORT" value="23920"/>
-         <system-property name="JMX_SYSTEM_CONNECTOR_PORT" value="28686"/>
-         <system-property name="OSGI_SHELL_TELNET_PORT" value="26666"/>
-         <system-property name="JAVA_DEBUGGER_PORT" value="29009"/>
-     </config>
-  </configs>
-  <property name="administrative.domain.name" value="domain1"/>
-  <secure-admin special-admin-indicator="718fe3ff-df18-49f8-84a0-3aeedb3250db">
-      <secure-admin-principal dn="CN=localhost,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
-      <secure-admin-principal dn="CN=localhost-instance,OU=GlassFish,O=Oracle Corporation,L=Santa Clara,ST=California,C=US"></secure-admin-principal>
-  </secure-admin>
-</domain>
diff --git a/conf/docker-aio/dv/install/default.config b/conf/docker-aio/dv/install/default.config
deleted file mode 100644
index 0b806a8714b..00000000000
--- a/conf/docker-aio/dv/install/default.config
+++ /dev/null
@@ -1,15 +0,0 @@
-HOST_DNS_ADDRESS	localhost
-GLASSFISH_DIRECTORY	/opt/glassfish4
-ADMIN_EMAIL	 
-MAIL_SERVER	mail.hmdc.harvard.edu
-POSTGRES_ADMIN_PASSWORD	secret
-POSTGRES_SERVER	db
-POSTGRES_PORT	5432
-POSTGRES_DATABASE	dvndb
-POSTGRES_USER	dvnapp
-POSTGRES_PASSWORD	secret
-SOLR_LOCATION	idx
-RSERVE_HOST	localhost
-RSERVE_PORT	6311
-RSERVE_USER	rserve
-RSERVE_PASSWORD	rserve
diff --git a/conf/docker-aio/dv/pg_hba.conf b/conf/docker-aio/dv/pg_hba.conf
deleted file mode 100644
index 77feba5247d..00000000000
--- a/conf/docker-aio/dv/pg_hba.conf
+++ /dev/null
@@ -1,91 +0,0 @@
-# PostgreSQL Client Authentication Configuration File
-# ===================================================
-#
-# Refer to the "Client Authentication" section in the PostgreSQL
-# documentation for a complete description of this file.  A short
-# synopsis follows.
-#
-# This file controls: which hosts are allowed to connect, how clients
-# are authenticated, which PostgreSQL user names they can use, which
-# databases they can access.  Records take one of these forms:
-#
-# local      DATABASE  USER  METHOD  [OPTIONS]
-# host       DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
-# hostssl    DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
-# hostnossl  DATABASE  USER  ADDRESS  METHOD  [OPTIONS]
-#
-# (The uppercase items must be replaced by actual values.)
-#
-# The first field is the connection type: "local" is a Unix-domain
-# socket, "host" is either a plain or SSL-encrypted TCP/IP socket,
-# "hostssl" is an SSL-encrypted TCP/IP socket, and "hostnossl" is a
-# plain TCP/IP socket.
-#
-# DATABASE can be "all", "sameuser", "samerole", "replication", a
-# database name, or a comma-separated list thereof. The "all"
-# keyword does not match "replication". Access to replication
-# must be enabled in a separate record (see example below).
-#
-# USER can be "all", a user name, a group name prefixed with "+", or a
-# comma-separated list thereof.  In both the DATABASE and USER fields
-# you can also write a file name prefixed with "@" to include names
-# from a separate file.
-#
-# ADDRESS specifies the set of hosts the record matches.  It can be a
-# host name, or it is made up of an IP address and a CIDR mask that is
-# an integer (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that
-# specifies the number of significant bits in the mask.  A host name
-# that starts with a dot (.) matches a suffix of the actual host name.
-# Alternatively, you can write an IP address and netmask in separate
-# columns to specify the set of hosts.  Instead of a CIDR-address, you
-# can write "samehost" to match any of the server's own IP addresses,
-# or "samenet" to match any address in any subnet that the server is
-# directly connected to.
-#
-# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi",
-# "krb5", "ident", "peer", "pam", "ldap", "radius" or "cert".  Note that
-# "password" sends passwords in clear text; "md5" is preferred since
-# it sends encrypted passwords.
-#
-# OPTIONS are a set of options for the authentication in the format
-# NAME=VALUE.  The available options depend on the different
-# authentication methods -- refer to the "Client Authentication"
-# section in the documentation for a list of which options are
-# available for which authentication methods.
-#
-# Database and user names containing spaces, commas, quotes and other
-# special characters must be quoted.  Quoting one of the keywords
-# "all", "sameuser", "samerole" or "replication" makes the name lose
-# its special character, and just match a database or username with
-# that name.
-#
-# This file is read on server startup and when the postmaster receives
-# a SIGHUP signal.  If you edit the file on a running system, you have
-# to SIGHUP the postmaster for the changes to take effect.  You can
-# use "pg_ctl reload" to do that.
-
-# Put your actual configuration here
-# ----------------------------------
-#
-# If you want to allow non-local connections, you need to add more
-# "host" records.  In that case you will also need to make PostgreSQL
-# listen on a non-local interface via the listen_addresses
-# configuration parameter, or via the -i or -h command line switches.
-
-
-
-# TYPE  DATABASE        USER            ADDRESS                 METHOD
-
-# "local" is for Unix domain socket connections only
-#local   all             all                                     peer
-local   all             all                                     trust
-# IPv4 local connections:
-#host    all             all             127.0.0.1/32            trust
-host    all             all             0.0.0.0/0            trust
-# IPv6 local connections:
-host    all             all             ::1/128                 trust
-# Allow replication connections from localhost, by a user with the
-# replication privilege.
-#local   replication     postgres                                peer
-#host    replication     postgres        127.0.0.1/32            ident
-#host    replication     postgres        ::1/128                 ident
diff --git a/conf/docker-aio/entrypoint.bash b/conf/docker-aio/entrypoint.bash
deleted file mode 100755
index 236bb30f67a..00000000000
--- a/conf/docker-aio/entrypoint.bash
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env bash
-export LANG=en_US.UTF-8
-sudo -u postgres /usr/pgsql-13/bin/pg_ctl start -D /var/lib/pgsql/13/data &
-cd /opt/solr-8.11.1/
-# TODO: Run Solr as non-root and remove "-force".
-bin/solr start -force
-bin/solr create_core -c collection1 -d server/solr/collection1/conf -force
-
-# start apache with -DFOREGROUND, backgrounded by the shell...
-apachectl -DFOREGROUND &
-
-# TODO: Run Payara as non-root.
-cd /opt/payara5
-bin/asadmin start-domain --debug
-sleep infinity
-
diff --git a/conf/docker-aio/httpd.conf b/conf/docker-aio/httpd.conf
deleted file mode 100644
index 85c851d785f..00000000000
--- a/conf/docker-aio/httpd.conf
+++ /dev/null
@@ -1,27 +0,0 @@
-
-Include conf.d/*.conf
-Include conf.modules.d/*.conf
-ServerName localhost
-Listen 80 443
-PidFile run/httpd.pid
-DocumentRoot "/var/www/html"
-TypesConfig /etc/mime.types
-User apache
-Group apache
-
-<VirtualHost *:80>
-  ServerName localhost
-  LogLevel debug
- ErrorLog logs/error_log
- LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
- CustomLog logs/access_log combined
- 
-  # proxy config (aka - what to send to glassfish or not)
-  ProxyPassMatch ^/Shibboleth.sso !
-  ProxyPassMatch ^/shibboleth-ds !
-  # pass everything else to Glassfish
-  ProxyPass / ajp://localhost:8009/
-# glassfish can be slow sometimes
-  ProxyTimeout 300 
-
-</VirtualHost>
diff --git a/conf/docker-aio/install.bash b/conf/docker-aio/install.bash
deleted file mode 100755
index 2b3275ad830..00000000000
--- a/conf/docker-aio/install.bash
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-sudo -u postgres createuser --superuser dvnapp
-#./entrypoint.bash &
-unzip dvinstall.zip
-cd dvinstall/
-echo "beginning installer"
-./install -admin_email=dvAdmin@mailinator.com -y -f > install.out 2> install.err
-
-echo "installer complete"
-cat install.err
diff --git a/conf/docker-aio/prep_it.bash b/conf/docker-aio/prep_it.bash
deleted file mode 100755
index adb257e43b1..00000000000
--- a/conf/docker-aio/prep_it.bash
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env bash
-
-# run through all the steps to setup docker-aio to run integration tests
-
-# hard-codes several assumptions: image is named dv0, container is named dv, port is 8084
-
-# glassfish healthy/ready retries
-n_wait=5
-
-cd conf/docker-aio
-./0prep_deps.sh
-./1prep.sh
-docker build -t dv0 -f c8.dockerfile .
-# cleanup from previous runs if necessary
-docker rm -f dv
-# start container
-docker run -d -p 8084:80 -p 8083:8080 -p 9010:9009 --name dv dv0
-# wait for glassfish to be healthy
-i_wait=0
-d_wait=10
-while [ $i_wait -lt $n_wait ]
-do
-	h=`docker inspect -f "{{.State.Health.Status}}" dv`
-	if [ "healthy" == "${h}" ]; then
-		break
-	else
-		sleep $d_wait
-	fi
-	i_wait=$(( $i_wait + 1 ))
-	
-done
-# try setupIT.bash
-docker exec dv /opt/dv/setupIT.bash
-err=$?
-if [ $err -ne 0 ]; then
-	echo "error - setupIT failure"
-	exit 1
-fi
-# configure DOI provider based on docker build arguments / environmental variables
-docker exec dv /opt/dv/configure_doi.bash
-err=$?
-if [ $err -ne 0 ]; then
-	echo "error - DOI configuration failure"
-	exit 1
-fi
-# handle config for the private url test (and things like publishing...)
-./seturl.bash
-
-
-cd ../..
-#echo "docker-aio ready to run integration tests ($i_retry)"
-echo "docker-aio ready to run integration tests"
-curl http://localhost:8084/api/info/version
-echo $?
-
diff --git a/conf/docker-aio/readme.md b/conf/docker-aio/readme.md
deleted file mode 100644
index ef4d3626cf0..00000000000
--- a/conf/docker-aio/readme.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# Docker All-In-One
-
-First-pass Docker all-in-one image, intended as a target for running integration tests.
-Also usable for normal development and system evaluation; not intended for production.
-
-### Requirements:
- - java11 compiler, maven, make, wget, docker
-
-### Quickstart:
- - in the root of the repository, run `./conf/docker-aio/prep_it.bash`
- - if using DataCite test credentials, update the build args appropriately.
- - if all goes well, you should see the results of the `api/info/version` endpoint, including the deployed build (e.g. `{"status":"OK","data":{"version":"4.8.6","build":"develop-c3e9f40"}}`). If not, you may need to read the non-quickstart instructions.
- - run integration tests: `./conf/docker-aio/run-test-suite.sh`
-
-----
-
-## More in-depth documentation:
-
-
-### Initial setup (aka - do once):
-- `cd conf/docker-aio` and run `./0prep_deps.sh` to create Payara and Solr tarballs in `conf/docker-aio/dv/deps`.
-
-### Per-build:
-
-> Note: If you encounter any issues, see the Troubleshooting section at the end of this document.
-
-#### Setup
-
-- `cd conf/docker-aio`, and run `./1prep.sh` to copy files for integration test data into docker build context; `1prep.sh` will also build the war file and installation zip file
-- build the docker image: `docker build -t dv0 -f c8.dockerfile .`
-
-- Run image: `docker run -d -p 8083:8080 -p 8084:80 --name dv dv0` (aka - forward port 8083 locally to 8080 in the container for payara, and 8084 to 80 for apache); if you'd like to connect a java debugger to payara, use `docker run -d -p 8083:8080 -p 8084:80 -p 9010:9009 --name dv dv0`
-
-- Installation (integration test): `docker exec dv /opt/dv/setupIT.bash` 
-  (Note that it's possible to customize the installation by editing `conf/docker-aio/default.config` and running `docker exec dv /opt/dv/install.bash`, but for the purposes of integration testing, the `setupIT.bash` script above works fine.)
-
-- update `dataverse.siteUrl` (appears only necessary for `DatasetsIT.testPrivateUrl`): `docker exec dv /usr/local/glassfish4/bin/asadmin create-jvm-options "-Ddataverse.siteUrl=http\://localhost\:8084"` (or use the provided `seturl.bash`)
-
-#### Run integration tests: 
-
-First, cd back to the root of the repo where the `pom.xml` file is (`cd ../..` assuming you're still in the `conf/docker-aio` directory). Then run the test suite with the script below:
-
-`conf/docker-aio/run-test-suite.sh`
-
-There isn't any strict requirement on the local ports (8083, 8084 in this doc), the name of the image (dv0), or the container (dv); these can be changed as desired as long as they are used consistently.
-
-### Troubleshooting Notes:
-
-* If the Dataverse build fails due to an error about `Module` being ambiguous, you might be using a Java 9 compiler.
-
-* If you see an error like this: 
- ```
- docker: Error response from daemon: Conflict. The container name "/dv" is already in use by container "5f72a45b68c86c7b0f4305b83ce7d663020329ea4e30fa2a3ce9ddb05223533d"
- You have to remove (or rename) that container to be able to reuse that name.
- ``` 
-    run something like `docker ps -a | grep dv` to see the container left over from the last run and something like `docker rm 5f72a45b68c8` to remove it. Then try the `docker run` command above again.
-
-* `empty reply from server` or `Failed to connect to ::1: Cannot assign requested address` tend to indicate either that you haven't given Payara enough time to start, or that your Docker setup is in an inconsistent state and should probably be restarted.
-
-* For manually fiddling around with the created dataverse, use user `dataverseAdmin` with password `admin1`.
diff --git a/conf/docker-aio/run-test-suite.sh b/conf/docker-aio/run-test-suite.sh
deleted file mode 100755
index 39809a7a50e..00000000000
--- a/conf/docker-aio/run-test-suite.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-# This is the canonical list of which "IT" tests are expected to pass.
-
-dvurl=$1
-if [ -z "$dvurl" ]; then
-	dvurl="http://localhost:8084"
-fi
-
-integrationtests=$(<tests/integration-tests.txt)
-
-# Please note the "dataverse.test.baseurl" is set to run for "all-in-one" Docker environment.
-# TODO: Rather than hard-coding the list of "IT" classes here, add a profile to pom.xml.
-mvn test -Dtest=$integrationtests -Ddataverse.test.baseurl=$dvurl
diff --git a/conf/docker-aio/setupIT.bash b/conf/docker-aio/setupIT.bash
deleted file mode 100755
index 528b8f3c5f8..00000000000
--- a/conf/docker-aio/setupIT.bash
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-# do integration-test install and test data setup
-
-cd /opt/dv
-unzip dvinstall.zip
-cd /opt/dv/testdata
-./scripts/deploy/phoenix.dataverse.org/prep
-./db.sh
-./install # modified from phoenix
-/usr/local/glassfish4/glassfish/bin/asadmin deploy /opt/dv/dvinstall/dataverse.war
-./post # modified from phoenix
-
diff --git a/conf/docker-aio/seturl.bash b/conf/docker-aio/seturl.bash
deleted file mode 100755
index a62fb6b3ea7..00000000000
--- a/conf/docker-aio/seturl.bash
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-docker exec dv /usr/local/glassfish4/bin/asadmin create-jvm-options "\"-Ddataverse.siteUrl=http\://localhost\:8084\""
diff --git a/conf/docker-aio/testdata/httpd.conf b/conf/docker-aio/testdata/httpd.conf
deleted file mode 100644
index 85c851d785f..00000000000
--- a/conf/docker-aio/testdata/httpd.conf
+++ /dev/null
@@ -1,27 +0,0 @@
-
-Include conf.d/*.conf
-Include conf.modules.d/*.conf
-ServerName localhost
-Listen 80 443
-PidFile run/httpd.pid
-DocumentRoot "/var/www/html"
-TypesConfig /etc/mime.types
-User apache
-Group apache
-
-<VirtualHost *:80>
-  ServerName localhost
-  LogLevel debug
- ErrorLog logs/error_log
- LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
- CustomLog logs/access_log combined
- 
-  # proxy config (aka - what to send to glassfish or not)
-  ProxyPassMatch ^/Shibboleth.sso !
-  ProxyPassMatch ^/shibboleth-ds !
-  # pass everything else to Glassfish
-  ProxyPass / ajp://localhost:8009/
-# glassfish can be slow sometimes
-  ProxyTimeout 300 
-
-</VirtualHost>
diff --git a/conf/docker-aio/testscripts/db.sh b/conf/docker-aio/testscripts/db.sh
deleted file mode 100755
index f0a9e409fd7..00000000000
--- a/conf/docker-aio/testscripts/db.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-psql -U postgres -c "CREATE ROLE dvnapp PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1
-psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1
diff --git a/conf/docker-aio/testscripts/install b/conf/docker-aio/testscripts/install
deleted file mode 100755
index f87f180b554..00000000000
--- a/conf/docker-aio/testscripts/install
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-export HOST_ADDRESS=localhost
-export GLASSFISH_ROOT=/opt/payara5
-export FILES_DIR=/opt/payara5/glassfish/domains/domain1/files
-export DB_NAME=dvndb
-export DB_PORT=5432
-export DB_HOST=localhost
-export DB_USER=dvnapp
-export DB_PASS=secret
-export RSERVE_HOST=localhost
-export RSERVE_PORT=6311
-export RSERVE_USER=rserve
-export RSERVE_PASS=rserve
-export SMTP_SERVER=localhost
-export MEM_HEAP_SIZE=2048
-export GLASSFISH_DOMAIN=domain1
-cd scripts/installer
-#cp ../../conf/jhove/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf
-cp /opt/dv/testdata/jhove.conf $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhove.conf
-cp /opt/dv/testdata/jhoveConfig.xsd $GLASSFISH_ROOT/glassfish/domains/$GLASSFISH_DOMAIN/config/jhoveConfig.xsd
-./as-setup.sh dvndb
diff --git a/conf/docker-aio/testscripts/post b/conf/docker-aio/testscripts/post
deleted file mode 100755
index 0f292109d31..00000000000
--- a/conf/docker-aio/testscripts/post
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-cd scripts/api
-./setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out
-cd ../..
-psql -U dvnapp dvndb -f doc/sphinx-guides/source/_static/util/createsequence.sql
-scripts/search/tests/publish-dataverse-root
-#git checkout scripts/api/data/dv-root.json
-scripts/search/tests/grant-authusers-add-on-root
-scripts/search/populate-users
-scripts/search/create-users
-scripts/search/tests/create-all-and-test
-scripts/search/tests/publish-spruce1-and-test
-#java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /var/www/html/schemaspy/latest
diff --git a/conf/docker-dcm/.gitignore b/conf/docker-dcm/.gitignore
deleted file mode 100644
index ac39981ce6a..00000000000
--- a/conf/docker-dcm/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*.rpm
-upload*.bash
diff --git a/conf/docker-dcm/0prep.sh b/conf/docker-dcm/0prep.sh
deleted file mode 100755
index 300aa39d567..00000000000
--- a/conf/docker-dcm/0prep.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-DCM_VERSION=0.5
-RSAL_VERSION=0.1
-
-if [ ! -e dcm-${DCM_VERSION}-0.noarch.rpm ]; then
-	wget https://github.com/sbgrid/data-capture-module/releases/download/${DCM_VERSION}/dcm-${DCM_VERSION}-0.noarch.rpm
-fi
-
-if [ ! -e rsal-${RSAL_VERSION}-0.noarch.rpm ] ;then
-	wget https://github.com/sbgrid/rsal/releases/download/${RSAL_VERSION}/rsal-${RSAL_VERSION}-0.noarch.rpm
-fi
diff --git a/conf/docker-dcm/c6client.dockerfile b/conf/docker-dcm/c6client.dockerfile
deleted file mode 100644
index e4d1ae7da82..00000000000
--- a/conf/docker-dcm/c6client.dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# build from repo root
-FROM centos:6
-RUN yum install -y epel-release
-RUN yum install -y rsync openssh-clients jq curl wget lynx
-RUN useradd depositor
-USER depositor
-WORKDIR /home/depositor
diff --git a/conf/docker-dcm/cfg/dcm/bashrc b/conf/docker-dcm/cfg/dcm/bashrc
deleted file mode 100644
index 07137ab8471..00000000000
--- a/conf/docker-dcm/cfg/dcm/bashrc
+++ /dev/null
@@ -1,18 +0,0 @@
-# .bashrc
-
-# User specific aliases and functions
-
-alias rm='rm -i'
-alias cp='cp -i'
-alias mv='mv -i'
-
-# Source global definitions
-if [ -f /etc/bashrc ]; then
-	. /etc/bashrc
-fi
-
-# these are dummy values, obviously
-export UPLOADHOST=dcmsrv
-export DVAPIKEY=burrito
-export DVHOSTINT=dvsrv
-export DVHOST=dvsrv
diff --git a/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh b/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh
deleted file mode 100755
index 0db674bfac4..00000000000
--- a/conf/docker-dcm/cfg/dcm/entrypoint-dcm.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-
-/etc/init.d/sshd start
-/etc/init.d/redis start
-/etc/init.d/rq start
-lighttpd -D -f /etc/lighttpd/lighttpd.conf
diff --git a/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh b/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh
deleted file mode 100755
index 3964a79391e..00000000000
--- a/conf/docker-dcm/cfg/dcm/healthcheck-dcm.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-
-r_rq=`/etc/init.d/rq status`
-if [ "rq_worker running" != "$r_rq" ]; then
-	echo "rq failed"
-	exit 1
-fi
-r_www=`/etc/init.d/lighttpd status`
-e_www=$?
-if [ 0 -ne $e_www ]; then
-	echo "lighttpd failed"
-	exit 2
-fi
-
diff --git a/conf/docker-dcm/cfg/dcm/rq-init-d b/conf/docker-dcm/cfg/dcm/rq-init-d
deleted file mode 100755
index 093cd894376..00000000000
--- a/conf/docker-dcm/cfg/dcm/rq-init-d
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-
-# chkconfig: 2345 90 60
-# description: rq worker script (single worker process)
-
-# example rq configuration file (to be placed in /etc/init.d)
-
-# works on cent6
-
-DAEMON=rq_worker
-DAEMON_PATH=/opt/dcm/gen/
-export UPLOADHOST=dcmsrv
-VIRTUALENV=
-LOGFILE=/var/log/${DAEMON}.log
-PIDFILE=/var/run/${DAEMON}.pid
-
-case "$1" in
-start)
-	printf "%-50s" "starting $DAEMON..."
-	cd $DAEMON_PATH
-	if [ ! -z "$VIRTUALENV" ]; then
-		source $VIRTUALENV/bin/activate
-	fi
-	rq worker normal --pid $PIDFILE > ${LOGFILE} 2>&1 &
-;;
-status)
-	if [ -f $PIDFILE ]; then
-		PID=`cat $PIDFILE`
-		if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-			printf "%s\n" "$DAEMON not running, but PID file ($PIDFILE) exists"
-		else
-			echo "$DAEMON running"
-		fi
-	else
-		printf "%s\n" "$DAEMON not running"
-	fi
-;;
-stop)
-	printf "%-50s" "stopping $DAEMON"
-	if [ -f $PIDFILE ]; then
-		PID=`cat $PIDFILE`
-		kill -HUP $PID
-		rm -f $PIDFILE
-	else
-		printf "%s\n" "no PID file ($PIDFILE) - maybe not running"
-	fi
-;;
-restart)
-	$0 stop
-	$0 start
-;;
-
-*)
-	echo "Usage: $0 {status|start|stop|restart}"
-	exit 1
-esac
-
diff --git a/conf/docker-dcm/cfg/dcm/test_install.sh b/conf/docker-dcm/cfg/dcm/test_install.sh
deleted file mode 100755
index 3026ceb9fa5..00000000000
--- a/conf/docker-dcm/cfg/dcm/test_install.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-cp /etc/dcm/rq-init-d /etc/init.d/rq
-cp /etc/dcm/lighttpd-conf-dcm /etc/lighttpd/lighttpd.conf
-cp /etc/dcm/lighttpd-modules-dcm /etc/lighttpd/modules.conf
-cp /etc/dcm/dcm-rssh.conf /etc/rssh.conf
-
diff --git a/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh b/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh
deleted file mode 100755
index 92466c3bd4b..00000000000
--- a/conf/docker-dcm/cfg/rsal/entrypoint-rsal.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-
-#/usr/bin/rsync --no-detach --daemon --config /etc/rsyncd.conf
-/usr/bin/rsync --daemon --config /etc/rsyncd.conf
-lighttpd -D -f /etc/lighttpd/lighttpd.conf
diff --git a/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf b/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf
deleted file mode 100644
index cdb1438af82..00000000000
--- a/conf/docker-dcm/cfg/rsal/lighttpd-modules.conf
+++ /dev/null
@@ -1,174 +0,0 @@
-#######################################################################
-##
-## ansible managed
-#
-##  Modules to load
-## -----------------
-##
-## at least mod_access and mod_accesslog should be loaded
-## all other modules should only be loaded if really necessary
-##
-## - saves some time
-## - saves memory
-##
-## the default module set contains:
-##
-## "mod_indexfile", "mod_dirlisting", "mod_staticfile"
-##
-## you don't have to include those modules in your list
-##
-## Modules, which are pulled in via conf.d/*.conf
-##
-## NOTE: the order of modules is important.
-##
-## - mod_accesslog     -> conf.d/access_log.conf
-## - mod_compress      -> conf.d/compress.conf
-## - mod_status        -> conf.d/status.conf
-## - mod_webdav        -> conf.d/webdav.conf
-## - mod_cml           -> conf.d/cml.conf
-## - mod_evhost        -> conf.d/evhost.conf
-## - mod_simple_vhost  -> conf.d/simple_vhost.conf
-## - mod_mysql_vhost   -> conf.d/mysql_vhost.conf
-## - mod_trigger_b4_dl -> conf.d/trigger_b4_dl.conf
-## - mod_userdir       -> conf.d/userdir.conf
-## - mod_rrdtool       -> conf.d/rrdtool.conf
-## - mod_ssi           -> conf.d/ssi.conf
-## - mod_cgi           -> conf.d/cgi.conf
-## - mod_scgi          -> conf.d/scgi.conf
-## - mod_fastcgi       -> conf.d/fastcgi.conf
-## - mod_proxy         -> conf.d/proxy.conf
-## - mod_secdownload   -> conf.d/secdownload.conf
-## - mod_expire        -> conf.d/expire.conf
-##
-
-server.modules = (
-  "mod_access",
-#  "mod_alias",
-#  "mod_auth",
-#  "mod_evasive",
-#  "mod_redirect",
-#  "mod_rewrite",
-#  "mod_setenv",
-#  "mod_usertrack",
-)
-
-##
-#######################################################################
-
-#######################################################################
-##
-##  Config for various Modules
-##
-
-##
-## mod_ssi
-##
-#include "conf.d/ssi.conf"
-
-##
-## mod_status
-##
-#include "conf.d/status.conf"
-
-##
-## mod_webdav
-##
-#include "conf.d/webdav.conf"
-
-##
-## mod_compress
-##
-#include "conf.d/compress.conf"
-
-##
-## mod_userdir
-##
-#include "conf.d/userdir.conf"
-
-##
-## mod_magnet
-##
-#include "conf.d/magnet.conf"
-
-##
-## mod_cml
-##
-#include "conf.d/cml.conf"
-
-##
-## mod_rrdtool
-##
-#include "conf.d/rrdtool.conf"
-
-##
-## mod_proxy
-##
-#include "conf.d/proxy.conf"
-
-##
-## mod_expire
-##
-#include "conf.d/expire.conf"
-
-##
-## mod_secdownload
-##
-#include "conf.d/secdownload.conf"
-
-##
-#######################################################################
-
-#######################################################################
-##
-## CGI modules
-##
-
-##
-## SCGI (mod_scgi)
-##
-#include "conf.d/scgi.conf"
-
-##
-## FastCGI (mod_fastcgi)
-##
-#include "conf.d/fastcgi.conf"
-
-##
-## plain old CGI (mod_cgi)
-##
-include "conf.d/cgi.conf"
-
-##
-#######################################################################
-
-#######################################################################
-##
-## VHost Modules
-##
-##  Only load ONE of them!
-## ========================
-##
-
-##
-## You can use conditionals for vhosts as well.
-## 
-## see http://www.lighttpd.net/documentation/configuration.html
-##
-
-##
-## mod_evhost
-##
-#include "conf.d/evhost.conf"
-
-##
-## mod_simple_vhost
-##
-#include "conf.d/simple_vhost.conf"
-
-##
-## mod_mysql_vhost
-##
-#include "conf.d/mysql_vhost.conf"
-
-##
-#######################################################################
diff --git a/conf/docker-dcm/cfg/rsal/lighttpd.conf b/conf/docker-dcm/cfg/rsal/lighttpd.conf
deleted file mode 100644
index 5874d60eb48..00000000000
--- a/conf/docker-dcm/cfg/rsal/lighttpd.conf
+++ /dev/null
@@ -1,43 +0,0 @@
-## lighttpd configuration customized for RSAL; centos7
-
-# refuse connections not from frontend or localhost
-# DO NOT HAVE THIS OPEN TO THE WORLD!!!
-#$HTTP["remoteip"] !~ "192.168.2.2|127.0.0.1" {
-#url.access-deny = ("")
-#}
-server.breakagelog = "/var/log/lighttpd/breakage.log"
-
-#######################################################################
-##
-## Some variable definitions which will make chrooting easier.
-##
-## If you add a variable here, add the corresponding variable in the
-## chroot example as well.
-##
-var.log_root    = "/var/log/lighttpd"
-var.server_root = "/opt/rsal/api"
-var.state_dir   = "/var/run"
-var.home_dir    = "/var/lib/lighttpd"
-var.conf_dir    = "/etc/lighttpd"
-
-var.cache_dir   = "/var/cache/lighttpd"
-var.socket_dir  = home_dir + "/sockets"
-include "modules.conf"
-server.port = 80
-server.use-ipv6 = "disable"
-server.username  = "lighttpd"
-server.groupname = "lighttpd"
-server.document-root = server_root 
-server.pid-file = state_dir + "/lighttpd.pid"
-server.errorlog             = log_root + "/error.log"
-include "conf.d/access_log.conf"
-include "conf.d/debug.conf"
-server.event-handler = "linux-sysepoll"
-server.network-backend = "linux-sendfile"
-server.stat-cache-engine = "simple"
-server.max-connections = 1024
-static-file.exclude-extensions = ( ".php", ".pl", ".fcgi", ".scgi" )
-include "conf.d/mime.conf"
-include "conf.d/dirlisting.conf"
-server.follow-symlink = "enable"
-server.upload-dirs = ( "/var/tmp" )
diff --git a/conf/docker-dcm/cfg/rsal/rsyncd.conf b/conf/docker-dcm/cfg/rsal/rsyncd.conf
deleted file mode 100644
index 5a15ab28a12..00000000000
--- a/conf/docker-dcm/cfg/rsal/rsyncd.conf
+++ /dev/null
@@ -1,8 +0,0 @@
-lock file=/var/run/rsync.lock
-log file=/var/log/rsyncd.log
-pid file=/var/log/rsyncd.pid
-
-[10.5072]
- path=/public/
- read only=yes
-
diff --git a/conf/docker-dcm/configure_dcm.sh b/conf/docker-dcm/configure_dcm.sh
deleted file mode 100755
index 5b65b0a0314..00000000000
--- a/conf/docker-dcm/configure_dcm.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-
-echo "dcm configs on dv side to be done"
-
-# in homage to dataverse traditions, reset to insecure "burrito" admin API key
-sudo -u postgres psql -c "update apitoken set tokenstring='burrito' where id=1;" dvndb
-sudo -u postgres psql -c "update authenticateduser set superuser='t' where id=1;" dvndb
-
-# dataverse configs for DCM
-curl -X PUT -d "SHA-1" "http://localhost:8080/api/admin/settings/:FileFixityChecksumAlgorithm"
-curl -X PUT "http://localhost:8080/api/admin/settings/:UploadMethods" -d "dcm/rsync+ssh"
-curl -X PUT "http://localhost:8080/api/admin/settings/:DataCaptureModuleUrl" -d "http://dcmsrv"
-
-# configure for RSAL downloads; but no workflows or RSAL yet
-curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "rsal/rsync"
-
-# publish root dataverse
-curl -X POST -H "X-Dataverse-key: burrito" "http://localhost:8080/api/dataverses/root/actions/:publish"
-
-# symlink `hold` volume 
-mkdir -p /usr/local/glassfish4/glassfish/domains/domain1/files/
-ln -s /hold /usr/local/glassfish4/glassfish/domains/domain1/files/10.5072
-
-# need to set siteUrl
-cd /usr/local/glassfish4
-bin/asadmin create-jvm-options "\"-Ddataverse.siteUrl=http\://localhost\:8084\""
diff --git a/conf/docker-dcm/configure_rsal.sh b/conf/docker-dcm/configure_rsal.sh
deleted file mode 100755
index 5db43a34381..00000000000
--- a/conf/docker-dcm/configure_rsal.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-
-fn=rsal-workflow2.json
-# needs an actual IP (vs a hostname) for whitelist
-rsalip=`dig +short rsalsrv`
-
-# create workflow
-curl -s -X POST -H "Content-type: application/json" -d @${fn} "http://localhost:8080/api/admin/workflows" 
-
-# put rsal on the whitelist
-curl -X PUT -d "127.0.0.1;${rsalip}" "http://localhost:8080/api/admin/workflows/ip-whitelist"
-
-# set workflow as default
-curl -X PUT -d "1" "http://localhost:8080/api/admin/workflows/default/PrePublishDataset"
-
-# local access path
-curl -X PUT -d "/hpc/storage" "http://localhost:8080/api/admin/settings/:LocalDataAccessPath"
-
-# storage sites
-curl -X POST -H "Content-type: application/json" --upload-file site-primary.json "http://localhost:8080/api/admin/storageSites"
-curl -X POST -H "Content-type: application/json" --upload-file site-remote.json "http://localhost:8080/api/admin/storageSites"
diff --git a/conf/docker-dcm/create.bash b/conf/docker-dcm/create.bash
deleted file mode 100755
index 58ae6e61dc7..00000000000
--- a/conf/docker-dcm/create.bash
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-
-# user creates dataset
-k_d=burrito
-dv_d=root
-h=http://dvsrv
-
-fn=dataset.json
-#dset_id=`curl -s -H "X-Dataverse-key: $k_d" -X POST --upload-file $fn $h/api/dataverses/$dv_d/datasets | jq .data.id`
-r=`curl -s -H "X-Dataverse-key: $k_d" -X POST --upload-file $fn $h/api/dataverses/$dv_d/datasets`
-echo $r
-dset_id=`echo $r | jq .data.id`
-echo "dataset created with id: $dset_id"
-
-if [ "null" == "${dset_id}" ]; then
-	echo "error - no dataset id from create command"
-	exit 1
-fi
-echo "dataset created; internal/db id: ${dset_id}"
-
-
diff --git a/conf/docker-dcm/dataset.json b/conf/docker-dcm/dataset.json
deleted file mode 100644
index fb1b734ed40..00000000000
--- a/conf/docker-dcm/dataset.json
+++ /dev/null
@@ -1,126 +0,0 @@
-{
-    "datasetVersion": {
-      "metadataBlocks": {
-        "citation": {
-          "displayName": "Citation Metadata",
-          "fields": [
-            {
-              "typeName": "title",
-              "multiple": false,
-              "typeClass": "primitive",
-              "value": "DCM test dataset"
-            },
-            {
-              "typeName": "productionDate",
-              "multiple": false,
-              "typeClass": "primitive",
-              "value": "2017-04-01"
-            },
-            {
-              "typeName": "dsDescription",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "dsDescriptionValue": {
-                    "typeName": "dsDescriptionValue",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "this would normally be a dataset large enough to require a DCM"
-                  }
-                }
-              ]
-            },
-            {
-              "typeName": "depositor",
-              "multiple": false,
-              "typeClass": "primitive",
-              "value": "Doc, Bob"
-            },
-            {
-              "typeName": "producer",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "producerName": {
-                    "typeName": "producerName",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "Prof, Arthor"
-                  },
-                  "producerAffiliation": {
-                    "typeName": "producerAffiliation",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "LibraScholar"
-                  }
-                }
-              ]
-            },
-            {
-              "typeName": "author",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "authorName": {
-                    "typeName": "authorName",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "Student, Carol"
-                  }
-                ,
-		  "authorAffiliation": {
-		   "typeName": "authorAffiliation",
-		   "multiple": false,
-		   "typeClass": "primitive",
-		   "value": "LibraScholar"
-		  }
-		},
-                {
-                  "authorName": {
-                    "typeName": "authorName",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "Doc, Bob"
-                  }
-                ,
-		  "authorAffiliation": {
-		   "typeName": "authorAffiliation",
-		   "multiple": false,
-		   "typeClass": "primitive",
-		   "value": "LibraScholar"
-		  }
-		}
-		
-              ]
-            },
-            {
-              "typeName": "datasetContact",
-              "multiple": true,
-              "typeClass": "compound",
-              "value": [
-                {
-                  "datasetContactEmail": {
-                    "typeName": "datasetContactEmail",
-                    "multiple": false,
-                    "typeClass": "primitive",
-                    "value": "dsContact@mailinator.com"
-                  }
-                }
-              ]
-            },
-            {
-              "typeName": "subject",
-              "multiple": true,
-              "typeClass": "controlledVocabulary",
-              "value": [
-                "Medicine, Health and Life Sciences"
-              ]
-            }
-          ]
-        }
-      }
-  }
-}
diff --git a/conf/docker-dcm/dcmsrv.dockerfile b/conf/docker-dcm/dcmsrv.dockerfile
deleted file mode 100644
index 9989fa3a89d..00000000000
--- a/conf/docker-dcm/dcmsrv.dockerfile
+++ /dev/null
@@ -1,21 +0,0 @@
-# build from repo root
-FROM centos:6
-RUN yum install -y epel-release
-ARG RPMFILE=dcm-0.5-0.noarch.rpm
-COPY ${RPMFILE} /tmp/
-COPY cfg/dcm/bashrc /root/.bashrc
-COPY cfg/dcm/test_install.sh /root/
-RUN yum localinstall -y /tmp/${RPMFILE}
-RUN pip install -r /opt/dcm/requirements.txt
-RUN pip install awscli==1.15.75
-run export PATH=~/.local/bin:$PATH
-RUN /root/test_install.sh
-COPY cfg/dcm/rq-init-d /etc/init.d/rq
-RUN useradd glassfish
-COPY cfg/dcm/entrypoint-dcm.sh /
-COPY cfg/dcm/healthcheck-dcm.sh /
-EXPOSE 80
-EXPOSE 22
-VOLUME /hold
-HEALTHCHECK CMD /healthcheck-dcm.sh
-CMD ["/entrypoint-dcm.sh"]
diff --git a/conf/docker-dcm/docker-compose.yml b/conf/docker-dcm/docker-compose.yml
deleted file mode 100644
index 49d4467d349..00000000000
--- a/conf/docker-dcm/docker-compose.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-# initial docker-compose file for combined Dataverse and DCM with shared filesystem
-
-version: '3'
-
-services:
-  dcmsrv:
-    build:
-      context: .
-      dockerfile: dcmsrv.dockerfile
-    container_name: dcmsrv
-    volumes:
-      - hold:/hold
-  rsalsrv:
-    build:
-      context: .
-      dockerfile: rsalsrv.dockerfile
-    container_name: rsalsrv
-#    image: rsalrepo_rsal
-    volumes:
-      - hold:/hold
-      - ./:/mnt
-    environment:
-      DV_HOST: http://dvsrv:8080
-      DV_APIKEY: burrito
-    ports:
-      - "8889:80"
-      - "873:873"
-  dvsrv:
-    build:
-      context: .
-      dockerfile: dv0dcm.dockerfile
-    container_name: dvsrv
-    volumes:
-      - hold:/hold
-      - ./:/mnt
-    ports:
-      - "8083:8080"
-      - "8084:80"
-  client:
-    build:
-      context: .
-      dockerfile: c6client.dockerfile
-    command: sleep infinity
-    container_name: dcm_client
-    volumes:
-      - ./:/mnt
-
-volumes:
-  hold:
-
diff --git a/conf/docker-dcm/dv0dcm.dockerfile b/conf/docker-dcm/dv0dcm.dockerfile
deleted file mode 100644
index 021534c8978..00000000000
--- a/conf/docker-dcm/dv0dcm.dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# dv0 assumed to be image name for docker-aio
-FROM dv0
-RUN yum install -y bind-utils
-COPY configure_dcm.sh /opt/dv/
-COPY configure_rsal.sh /opt/dv/
-COPY rsal-workflow2.json site-primary.json site-remote.json /opt/dv/
-VOLUME /hold
diff --git a/conf/docker-dcm/get_transfer.bash b/conf/docker-dcm/get_transfer.bash
deleted file mode 100755
index 42080f536e1..00000000000
--- a/conf/docker-dcm/get_transfer.bash
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-# user gets transfer script
-
-dset_id=$1
-if [ -z "$dset_id" ]; then
-	echo "no dataset id specified, bailing out"
-	exit 1
-fi
-
-k_d=burrito
-dv_d=root
-
-h=http://dvsrv
-
-#get upload script from DCM
-wget --header "X-Dataverse-key: ${k_d}" ${h}/api/datasets/${dset_id}/dataCaptureModule/rsync -O upload-${dset_id}.bash
-
-
diff --git a/conf/docker-dcm/publish_major.bash b/conf/docker-dcm/publish_major.bash
deleted file mode 100755
index 6a3fd1288ca..00000000000
--- a/conf/docker-dcm/publish_major.bash
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-
-# publish dataset based on database id
-
-dset_id=$1
-if [ -z "$dset_id" ]; then
-	echo "no dataset id specified, bailing out"
-	exit 1
-fi
-
-k_d=burrito
-
-h=http://dvsrv
-
-curl -X POST -H "X-Dataverse-key: ${k_d}" "${h}/api/datasets/${dset_id}/actions/:publish?type=major"
-
-
diff --git a/conf/docker-dcm/readme.md b/conf/docker-dcm/readme.md
deleted file mode 100644
index 3e6a15e61d6..00000000000
--- a/conf/docker-dcm/readme.md
+++ /dev/null
@@ -1,26 +0,0 @@
-This docker-compose setup is intended for use in development, small scale evaluation, and potentially serve as an example of a working (although not production security level) configuration.
-
-Setup:
-
-- build docker-aio image with name dv0 as described in `../docker-aio` (don't start up the docker image or run setupIT.bash)
-- work in the `conf/docker-dcm` directory for below commands
-- download/prepare dependencies: `./0prep.sh`
-- build dcm/dv0dcm images with docker-compose: `docker-compose -f docker-compose.yml build`
-- start containers: `docker-compose -f docker-compose.yml up -d`
-- wait for container to show "healthy" (aka - `docker ps`), then run dataverse app installation: `docker exec dvsrv /opt/dv/install.bash`
-- for development, you probably want to use the `FAKE` DOI provider: `docker exec -it dvsrv /opt/dv/configure_doi.bash`
-- configure dataverse application to use DCM: `docker exec -it dvsrv /opt/dv/configure_dcm.sh`
-- configure dataverse application to use RSAL (if desired): `docker exec -it dvsrv /opt/dv/configure_rsal.sh`
-
-Operation:
-The dataverse installation is accessible at `http://localhost:8084`.
-The `dcm_client` container is intended to be used for executing transfer scripts, and `conf/docker-dcm` is available at `/mnt` inside the container; this container can be accessed with `docker exec -it dcm_client bash`.
-The DCM cron job is NOT configured here; for development purposes the DCM checks can be run manually with `docker exec -it dcmsrv /opt/dcm/scn/post_upload.bash`.
-The RSAL cron job is similarly NOT configured; for development purposes `docker exec -it rsalsrv /opt/rsal/scn/pub.py` can be run manually.
-
-
-Cleanup:
-- shutdown/cleanup `docker-compose -f docker-compose.yml down -v`
-
-For reference, this configuration was working with docker 17.09 / docker-compose 1.16.
-
diff --git a/conf/docker-dcm/rsal-workflow2.json b/conf/docker-dcm/rsal-workflow2.json
deleted file mode 100644
index 322d3ecbcf7..00000000000
--- a/conf/docker-dcm/rsal-workflow2.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-    "name": "RSAL file move for publication",
-    "steps": [
-        {
-            "provider":":internal",
-            "stepType":"log",
-            "parameters": {
-                "message": "Pre-http request"
-            }
-        },
-        {
-            "provider":":internal",
-            "stepType":"http/sr",
-            "parameters": {
-                "url":"http://rsalsrv/rr.py",
-                "method":"POST",
-                "contentType":"text/plain",
-                "body":"${invocationId}\ndataset.id=${dataset.id}\ndataset.identifier=${dataset.identifier}\ndataset.globalId=${dataset.globalId}",
-                "expectedResponse":"OK.*",
-                "rollbackMethod":"DELETE"
-            }
-        },
-        {
-            "provider":":internal",
-            "stepType":"log",
-            "parameters": {
-                "message": "Post-http request"
-            }
-        }
-    ]
-}
diff --git a/conf/docker-dcm/rsalsrv.dockerfile b/conf/docker-dcm/rsalsrv.dockerfile
deleted file mode 100644
index 844432afe6b..00000000000
--- a/conf/docker-dcm/rsalsrv.dockerfile
+++ /dev/null
@@ -1,20 +0,0 @@
-FROM centos:7
-ARG RPMFILE=rsal-0.1-0.noarch.rpm
-RUN yum update; yum install -y epel-release 
-COPY ${RPMFILE} /tmp/
-RUN yum localinstall -y /tmp/${RPMFILE}
-COPY cfg/rsal/rsyncd.conf /etc/rsyncd.conf
-COPY cfg/rsal/entrypoint-rsal.sh /entrypoint.sh
-COPY cfg/rsal/lighttpd-modules.conf /etc/lighttpd/modules.conf
-COPY cfg/rsal/lighttpd.conf /etc/lighttpd/lighttpd.conf
-RUN mkdir -p /public/FK2 
-RUN pip2 install -r /opt/rsal/scn/requirements.txt
-#COPY doc/testdata/ /hold/
-ARG DV_HOST=http://dv_srv:8080
-ARG DV_API_KEY=burrito
-ENV DV_HOST ${DV_HOST}
-ENV DV_API_KEY ${DV_API_KEY}
-EXPOSE 873
-EXPOSE 80
-HEALTHCHECK CMD curl --fail http://localhost/hw.py || exit 1
-CMD ["/entrypoint.sh"]
diff --git a/conf/docker-dcm/site-primary.json b/conf/docker-dcm/site-primary.json
deleted file mode 100644
index 35b217edffd..00000000000
--- a/conf/docker-dcm/site-primary.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-	"hostname": "rsalsrv",
-	"name": "LibraScholar University",
-	"primaryStorage": true,
-	"transferProtocols": "rsync,posix"
-}
diff --git a/conf/docker-dcm/site-remote.json b/conf/docker-dcm/site-remote.json
deleted file mode 100644
index d47c3ef4dda..00000000000
--- a/conf/docker-dcm/site-remote.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-	"hostname": "remote.libra.research",
-	"name": "LibraResearch Institute",
-	"primaryStorage": false,
-	"transferProtocols": "rsync"
-}
diff --git a/conf/jhove/jhove.conf b/conf/jhove/jhove.conf
index 5134ae0f81a..971c60acfaa 100644
--- a/conf/jhove/jhove.conf
+++ b/conf/jhove/jhove.conf
@@ -3,7 +3,7 @@
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xmlns="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig"
  xsi:schemaLocation="http://hul.harvard.edu/ois/xml/ns/jhove/jhoveConfig
-                     file:///usr/local/payara5/glassfish/domains/domain1/config/jhoveConfig.xsd">
+                     file:///usr/local/payara6/glassfish/domains/domain1/config/jhoveConfig.xsd">
  <jhoveHome>/usr/local/src/jhove</jhoveHome>
  <defaultEncoding>utf-8</defaultEncoding>
  <tempDirectory>/tmp</tempDirectory>
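Note: the jhove.conf hunk above only swaps the Payara 5 path for the Payara 6 one in xsi:schemaLocation. A minimal sanity check, assuming the standard Payara 6 layout used elsewhere in this change, could be:

# Hypothetical check: confirm the XSD referenced by the updated jhove.conf exists on the target host.
ls -l /usr/local/payara6/glassfish/domains/domain1/config/jhoveConfig.xsd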
diff --git a/conf/keycloak/docker-compose.yml b/conf/keycloak/docker-compose.yml
index 2776f6572df..12b2382bd3d 100644
--- a/conf/keycloak/docker-compose.yml
+++ b/conf/keycloak/docker-compose.yml
@@ -3,13 +3,15 @@ version: "3.9"
 services:
 
   keycloak:
-    image: 'jboss/keycloak:16.1.1'
+    image: 'quay.io/keycloak/keycloak:21.0'
+    command:
+      - "start-dev"
+      - "--import-realm"
     environment:
-      - KEYCLOAK_USER=kcadmin
-      - KEYCLOAK_PASSWORD=kcpassword
-      - KEYCLOAK_IMPORT=/tmp/oidc-realm.json
+      - KEYCLOAK_ADMIN=kcadmin
+      - KEYCLOAK_ADMIN_PASSWORD=kcpassword
       - KEYCLOAK_LOGLEVEL=DEBUG
     ports:
       - "8090:8080"
     volumes:
-      - './oidc-realm.json:/tmp/oidc-realm.json'
+      - './test-realm.json:/opt/keycloak/data/import/test-realm.json'
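Note: the updated conf/keycloak/docker-compose.yml moves from the discontinued jboss/keycloak image to quay.io/keycloak/keycloak:21.0, which runs in dev mode and imports the bundled test-realm.json at startup. A quick local smoke test might look like the following sketch (assumes Docker Compose v2 and the 8090:8080 port mapping shown above):

# Start the dev Keycloak from the directory holding the compose file.
cd conf/keycloak
docker compose up -d

# Keycloak 21 drops the old /auth prefix; the imported realm should answer here once startup finishes.
curl -s http://localhost:8090/realms/test/.well-known/openid-configuration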
diff --git a/conf/keycloak/oidc-keycloak-auth-provider.json b/conf/keycloak/oidc-keycloak-auth-provider.json
index bc70640212d..7e01bd4c325 100644
--- a/conf/keycloak/oidc-keycloak-auth-provider.json
+++ b/conf/keycloak/oidc-keycloak-auth-provider.json
@@ -3,6 +3,6 @@
   "factoryAlias": "oidc",
   "title": "OIDC-Keycloak",
   "subtitle": "OIDC-Keycloak",
-  "factoryData": "type: oidc | issuer: http://localhost:8090/auth/realms/oidc-realm | clientId: oidc-client | clientSecret: ss6gE8mODCDfqesQaSG3gwUwZqZt547E",
+  "factoryData": "type: oidc | issuer: http://keycloak.mydomain.com:8090/realms/test | clientId: test | clientSecret: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8",
   "enabled": true
 }
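Note: with the issuer now pointing at the Keycloak 21 realm path (no /auth segment), this provider definition can be registered against a running Dataverse instance through the admin API, roughly as sketched below (localhost:8080 and an unblocked admin API are assumptions):

# Register the OIDC provider described above with the Dataverse admin API.
curl -X POST -H "Content-type: application/json" \
  --upload-file conf/keycloak/oidc-keycloak-auth-provider.json \
  "http://localhost:8080/api/admin/authenticationProviders"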
diff --git a/conf/keycloak/oidc-realm.json b/conf/keycloak/oidc-realm.json
deleted file mode 100644
index 1b77f2b4384..00000000000
--- a/conf/keycloak/oidc-realm.json
+++ /dev/null
@@ -1,2108 +0,0 @@
-{
-  "id": "oidc-realm",
-  "realm": "oidc-realm",
-  "notBefore": 0,
-  "defaultSignatureAlgorithm": "RS256",
-  "revokeRefreshToken": false,
-  "refreshTokenMaxReuse": 0,
-  "accessTokenLifespan": 300,
-  "accessTokenLifespanForImplicitFlow": 900,
-  "ssoSessionIdleTimeout": 1800,
-  "ssoSessionMaxLifespan": 36000,
-  "ssoSessionIdleTimeoutRememberMe": 0,
-  "ssoSessionMaxLifespanRememberMe": 0,
-  "offlineSessionIdleTimeout": 2592000,
-  "offlineSessionMaxLifespanEnabled": false,
-  "offlineSessionMaxLifespan": 5184000,
-  "clientSessionIdleTimeout": 0,
-  "clientSessionMaxLifespan": 0,
-  "clientOfflineSessionIdleTimeout": 0,
-  "clientOfflineSessionMaxLifespan": 0,
-  "accessCodeLifespan": 60,
-  "accessCodeLifespanUserAction": 300,
-  "accessCodeLifespanLogin": 1800,
-  "actionTokenGeneratedByAdminLifespan": 43200,
-  "actionTokenGeneratedByUserLifespan": 300,
-  "oauth2DeviceCodeLifespan": 600,
-  "oauth2DevicePollingInterval": 5,
-  "enabled": true,
-  "sslRequired": "external",
-  "registrationAllowed": false,
-  "registrationEmailAsUsername": false,
-  "rememberMe": false,
-  "verifyEmail": false,
-  "loginWithEmailAllowed": true,
-  "duplicateEmailsAllowed": false,
-  "resetPasswordAllowed": false,
-  "editUsernameAllowed": false,
-  "bruteForceProtected": false,
-  "permanentLockout": false,
-  "maxFailureWaitSeconds": 900,
-  "minimumQuickLoginWaitSeconds": 60,
-  "waitIncrementSeconds": 60,
-  "quickLoginCheckMilliSeconds": 1000,
-  "maxDeltaTimeSeconds": 43200,
-  "failureFactor": 30,
-  "roles": {
-    "realm": [
-      {
-        "id": "13d76240-fcf8-4361-9dbf-de268717cfb2",
-        "name": "uma_authorization",
-        "description": "${role_uma_authorization}",
-        "composite": false,
-        "clientRole": false,
-        "containerId": "oidc-realm",
-        "attributes": {}
-      },
-      {
-        "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5",
-        "name": "default-roles-oidc-realm",
-        "description": "${role_default-roles}",
-        "composite": true,
-        "composites": {
-          "realm": [
-            "offline_access",
-            "uma_authorization"
-          ]
-        },
-        "clientRole": false,
-        "containerId": "oidc-realm",
-        "attributes": {}
-      },
-      {
-        "id": "b907fd4e-0e54-461c-9411-3f736eef7d2f",
-        "name": "offline_access",
-        "description": "${role_offline-access}",
-        "composite": false,
-        "clientRole": false,
-        "containerId": "oidc-realm",
-        "attributes": {}
-      }
-    ],
-    "client": {
-      "realm-management": [
-        {
-          "id": "39342ea9-0b4e-4841-8996-433759e9297f",
-          "name": "create-client",
-          "description": "${role_create-client}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "f8680034-617d-45d3-9801-7bf0d704c549",
-          "name": "manage-users",
-          "description": "${role_manage-users}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "b08e4cc3-71e2-4395-b66b-fb1277b48b88",
-          "name": "manage-realm",
-          "description": "${role_manage-realm}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "c15dc407-d012-43af-9a21-a2923e1d7b74",
-          "name": "manage-events",
-          "description": "${role_manage-events}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "66c07cb7-42cd-4155-8485-6cc7bd37cba9",
-          "name": "view-realm",
-          "description": "${role_view-realm}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "0419515f-4ab8-43ca-ac69-e842195813c0",
-          "name": "view-events",
-          "description": "${role_view-events}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "aa553d5a-b2dc-4f81-979a-2af0a019fee0",
-          "name": "impersonation",
-          "description": "${role_impersonation}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "9567e1e9-b755-43a8-93ed-d5929391316f",
-          "name": "manage-clients",
-          "description": "${role_manage-clients}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "e3dab69f-7323-4aad-bf98-8b7697f36d57",
-          "name": "query-users",
-          "description": "${role_query-users}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "ee8a4855-d0d5-4261-bdba-b419d304a824",
-          "name": "query-groups",
-          "description": "${role_query-groups}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "4f251212-e922-4ac0-9cce-3ada607648d2",
-          "name": "view-identity-providers",
-          "description": "${role_view-identity-providers}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "34e1dc59-a975-424f-887b-52465e184a4b",
-          "name": "realm-admin",
-          "description": "${role_realm-admin}",
-          "composite": true,
-          "composites": {
-            "client": {
-              "realm-management": [
-                "create-client",
-                "manage-users",
-                "manage-realm",
-                "manage-events",
-                "view-realm",
-                "view-events",
-                "impersonation",
-                "manage-clients",
-                "query-users",
-                "view-identity-providers",
-                "query-groups",
-                "view-clients",
-                "view-users",
-                "manage-authorization",
-                "manage-identity-providers",
-                "query-realms",
-                "query-clients",
-                "view-authorization"
-              ]
-            }
-          },
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "d35aca04-0182-40d3-96b8-1ce5cc118729",
-          "name": "view-clients",
-          "description": "${role_view-clients}",
-          "composite": true,
-          "composites": {
-            "client": {
-              "realm-management": [
-                "query-clients"
-              ]
-            }
-          },
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "7d3b28d5-471a-4b2b-bc80-56d4ff80fd28",
-          "name": "view-users",
-          "description": "${role_view-users}",
-          "composite": true,
-          "composites": {
-            "client": {
-              "realm-management": [
-                "query-users",
-                "query-groups"
-              ]
-            }
-          },
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "651059eb-fc1a-4f8d-9ced-ed28b0a2f965",
-          "name": "manage-authorization",
-          "description": "${role_manage-authorization}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "73f447e9-def8-4214-8516-56571f2c6f65",
-          "name": "manage-identity-providers",
-          "description": "${role_manage-identity-providers}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "1b5f7c39-885e-4246-8cf5-25769544fc3d",
-          "name": "query-realms",
-          "description": "${role_query-realms}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "350da4c1-69d4-4557-a9a8-8ba760db0225",
-          "name": "query-clients",
-          "description": "${role_query-clients}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        },
-        {
-          "id": "43d51082-6922-4765-8022-529d91a4603f",
-          "name": "view-authorization",
-          "description": "${role_view-authorization}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-          "attributes": {}
-        }
-      ],
-      "security-admin-console": [],
-      "admin-cli": [],
-      "account-console": [],
-      "broker": [],
-      "oidc-client": [],
-      "account": [
-        {
-          "id": "a163535c-71de-4b2d-9530-26b25eeb1c1e",
-          "name": "delete-account",
-          "description": "${role_delete-account}",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537",
-          "attributes": {}
-        },
-        {
-          "id": "851c6a9f-bce7-4c70-be82-084c25d61b25",
-          "name": "manage-account",
-          "composite": false,
-          "clientRole": true,
-          "containerId": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537",
-          "attributes": {}
-        }
-      ]
-    }
-  },
-  "groups": [],
-  "defaultRole": {
-    "id": "88b414c4-3516-4486-8f8b-a811ed0e0ce5",
-    "name": "default-roles-oidc-realm",
-    "description": "${role_default-roles}",
-    "composite": true,
-    "clientRole": false,
-    "containerId": "oidc-realm"
-  },
-  "requiredCredentials": [
-    "password"
-  ],
-  "otpPolicyType": "totp",
-  "otpPolicyAlgorithm": "HmacSHA1",
-  "otpPolicyInitialCounter": 0,
-  "otpPolicyDigits": 6,
-  "otpPolicyLookAheadWindow": 1,
-  "otpPolicyPeriod": 30,
-  "otpSupportedApplications": [
-    "FreeOTP",
-    "Google Authenticator"
-  ],
-  "webAuthnPolicyRpEntityName": "keycloak",
-  "webAuthnPolicySignatureAlgorithms": [
-    "ES256"
-  ],
-  "webAuthnPolicyRpId": "",
-  "webAuthnPolicyAttestationConveyancePreference": "not specified",
-  "webAuthnPolicyAuthenticatorAttachment": "not specified",
-  "webAuthnPolicyRequireResidentKey": "not specified",
-  "webAuthnPolicyUserVerificationRequirement": "not specified",
-  "webAuthnPolicyCreateTimeout": 0,
-  "webAuthnPolicyAvoidSameAuthenticatorRegister": false,
-  "webAuthnPolicyAcceptableAaguids": [],
-  "webAuthnPolicyPasswordlessRpEntityName": "keycloak",
-  "webAuthnPolicyPasswordlessSignatureAlgorithms": [
-    "ES256"
-  ],
-  "webAuthnPolicyPasswordlessRpId": "",
-  "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified",
-  "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified",
-  "webAuthnPolicyPasswordlessRequireResidentKey": "not specified",
-  "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified",
-  "webAuthnPolicyPasswordlessCreateTimeout": 0,
-  "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false,
-  "webAuthnPolicyPasswordlessAcceptableAaguids": [],
-  "users": [
-    {
-      "username": "kcuser",
-      "enabled": true,
-      "totp": false,
-      "emailVerified": true,
-      "firstName": "Test",
-      "lastName": "Test",
-      "email": "test@test.com",
-      "credentials": [
-        {
-          "type": "password",
-          "value": "kcpassword"
-        }
-      ]
-    }
-  ],
-  "scopeMappings": [
-    {
-      "clientScope": "offline_access",
-      "roles": [
-        "offline_access"
-      ]
-    }
-  ],
-  "clientScopeMappings": {
-    "account": [
-      {
-        "client": "account-console",
-        "roles": [
-          "manage-account"
-        ]
-      }
-    ]
-  },
-  "clients": [
-    {
-      "id": "aed2e103-ee29-4d5c-a34e-1b8c65b7d537",
-      "clientId": "account",
-      "name": "${client_account}",
-      "rootUrl": "${authBaseUrl}",
-      "baseUrl": "/realms/oidc-realm/account/",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "redirectUris": [
-        "/realms/oidc-realm/account/*"
-      ],
-      "webOrigins": [],
-      "notBefore": 0,
-      "bearerOnly": false,
-      "consentRequired": false,
-      "standardFlowEnabled": true,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": false,
-      "serviceAccountsEnabled": false,
-      "publicClient": true,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {},
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": false,
-      "nodeReRegistrationTimeout": 0,
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    },
-    {
-      "id": "1e821c0e-f6b9-4324-9b23-e82b5431fb72",
-      "clientId": "account-console",
-      "name": "${client_account-console}",
-      "rootUrl": "${authBaseUrl}",
-      "baseUrl": "/realms/oidc-realm/account/",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "redirectUris": [
-        "/realms/oidc-realm/account/*"
-      ],
-      "webOrigins": [],
-      "notBefore": 0,
-      "bearerOnly": false,
-      "consentRequired": false,
-      "standardFlowEnabled": true,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": false,
-      "serviceAccountsEnabled": false,
-      "publicClient": true,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {
-        "pkce.code.challenge.method": "S256"
-      },
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": false,
-      "nodeReRegistrationTimeout": 0,
-      "protocolMappers": [
-        {
-          "id": "397616ab-4124-4a13-92b6-317423e818a3",
-          "name": "audience resolve",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-audience-resolve-mapper",
-          "consentRequired": false,
-          "config": {}
-        }
-      ],
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    },
-    {
-      "id": "dddcc3e0-d742-422b-8b5f-84a292ea9d66",
-      "clientId": "admin-cli",
-      "name": "${client_admin-cli}",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "redirectUris": [],
-      "webOrigins": [],
-      "notBefore": 0,
-      "bearerOnly": false,
-      "consentRequired": false,
-      "standardFlowEnabled": false,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": true,
-      "serviceAccountsEnabled": false,
-      "publicClient": true,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {},
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": false,
-      "nodeReRegistrationTimeout": 0,
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    },
-    {
-      "id": "df6f6cd0-a046-492f-84ac-b4fe31909be4",
-      "clientId": "broker",
-      "name": "${client_broker}",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "redirectUris": [],
-      "webOrigins": [],
-      "notBefore": 0,
-      "bearerOnly": true,
-      "consentRequired": false,
-      "standardFlowEnabled": true,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": false,
-      "serviceAccountsEnabled": false,
-      "publicClient": false,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {},
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": false,
-      "nodeReRegistrationTimeout": 0,
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    },
-    {
-      "id": "c0af31b9-21aa-4e70-baf3-8d68850c4081",
-      "clientId": "oidc-client",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "secret": "ss6gE8mODCDfqesQaSG3gwUwZqZt547E",
-      "redirectUris": [
-        "*"
-      ],
-      "webOrigins": [
-        "+"
-      ],
-      "notBefore": 0,
-      "bearerOnly": false,
-      "consentRequired": false,
-      "standardFlowEnabled": true,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": false,
-      "serviceAccountsEnabled": false,
-      "publicClient": false,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {
-        "saml.force.post.binding": "false",
-        "saml.multivalued.roles": "false",
-        "oauth2.device.authorization.grant.enabled": "false",
-        "use.jwks.url": "true",
-        "backchannel.logout.revoke.offline.tokens": "false",
-        "saml.server.signature.keyinfo.ext": "false",
-        "use.refresh.tokens": "true",
-        "jwt.credential.certificate": "MIICpTCCAY0CBgGE8V6o6TANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDDAtvaWRjLWNsaWVudDAeFw0yMjEyMDgxMDUyMDNaFw0zMjEyMDgxMDUzNDNaMBYxFDASBgNVBAMMC29pZGMtY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArUffTl+jXWzyY3T4VVtkiGyNnY+RgyAXUzz+dxT7wUQaYSiNPvmaxnio555pWjR403SRUjVxM8eJYgHK9s43qQWdheXBIHyLKaQfjVsTtSmHgFtPmjk+kweQs6fxUi5CNvtx4RTCaOK5wV8q5q1X7mb8cZ5+gLSx1f/pHtayFXMT75nV04aZKWgPztPz8w+QXUx9cuFY4OIiTdRbdyfr1oOiDtMbxxA22tggB/HSMVkSckT3LSPj7fJKJMPFYi/g1AXxGipX/q8XkmOBrvNePCpH0F/IZbC1vXEsDC6urfoijOdiZgPMobuADmWHPiw2zgCN8qa6QuLFaI+JduXT9QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCEOYRHkH8DnBucb+uN5c9U/fZY+mpglxzZvby7dGBXfVwLN+eP1kGcQPaFi+nshk7FgF4mR5/cmuAPZt+YBbgP0z37D49nB7S6sniwzfhCAAplOT4vmm+MjperTDsWFUGhQZJvN/jxqP2Xccw7N//ReYi7yOlmWhwGyqQyTi0ySbE3BY5eFvUKepekybYi/15XlyF8lwS2jH1MvnJAxAMNVpVUcP4wTnq/dOw5ybrVWF0mPnA8KVzTPuPE5nzZvZ3rkXQeEJTffIToR+T/DH/KTLXcNUtx4nG0ajJ0gM6iVAXGnKlI9Viq/M5Ese+52I6rQmxTsFMn57LNzKgMpWcE",
-        "oidc.ciba.grant.enabled": "false",
-        "use.jwks.string": "false",
-        "backchannel.logout.session.required": "false",
-        "client_credentials.use_refresh_token": "false",
-        "require.pushed.authorization.requests": "false",
-        "saml.client.signature": "false",
-        "id.token.as.detached.signature": "false",
-        "saml.assertion.signature": "false",
-        "saml.encrypt": "false",
-        "saml.server.signature": "false",
-        "exclude.session.state.from.auth.response": "false",
-        "saml.artifact.binding": "false",
-        "saml_force_name_id_format": "false",
-        "tls.client.certificate.bound.access.tokens": "false",
-        "saml.authnstatement": "false",
-        "display.on.consent.screen": "false",
-        "saml.onetimeuse.condition": "false"
-      },
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": true,
-      "nodeReRegistrationTimeout": -1,
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    },
-    {
-      "id": "43ffb712-f233-48e2-ae79-d6993bac34a5",
-      "clientId": "realm-management",
-      "name": "${client_realm-management}",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "redirectUris": [],
-      "webOrigins": [],
-      "notBefore": 0,
-      "bearerOnly": true,
-      "consentRequired": false,
-      "standardFlowEnabled": true,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": false,
-      "serviceAccountsEnabled": false,
-      "publicClient": false,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {},
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": false,
-      "nodeReRegistrationTimeout": 0,
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    },
-    {
-      "id": "3747f98f-efbb-49ef-8238-a349bf5ab409",
-      "clientId": "security-admin-console",
-      "name": "${client_security-admin-console}",
-      "rootUrl": "${authAdminUrl}",
-      "baseUrl": "/admin/oidc-realm/console/",
-      "surrogateAuthRequired": false,
-      "enabled": true,
-      "alwaysDisplayInConsole": false,
-      "clientAuthenticatorType": "client-secret",
-      "redirectUris": [
-        "/admin/oidc-realm/console/*"
-      ],
-      "webOrigins": [
-        "+"
-      ],
-      "notBefore": 0,
-      "bearerOnly": false,
-      "consentRequired": false,
-      "standardFlowEnabled": true,
-      "implicitFlowEnabled": false,
-      "directAccessGrantsEnabled": false,
-      "serviceAccountsEnabled": false,
-      "publicClient": true,
-      "frontchannelLogout": false,
-      "protocol": "openid-connect",
-      "attributes": {
-        "pkce.code.challenge.method": "S256"
-      },
-      "authenticationFlowBindingOverrides": {},
-      "fullScopeAllowed": false,
-      "nodeReRegistrationTimeout": 0,
-      "protocolMappers": [
-        {
-          "id": "2fbdf6c9-ee69-4edc-b780-ec62aecfc519",
-          "name": "locale",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "locale",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "locale",
-            "jsonType.label": "String"
-          }
-        }
-      ],
-      "defaultClientScopes": [
-        "web-origins",
-        "roles",
-        "profile",
-        "email"
-      ],
-      "optionalClientScopes": [
-        "address",
-        "phone",
-        "offline_access",
-        "microprofile-jwt"
-      ]
-    }
-  ],
-  "clientScopes": [
-    {
-      "id": "f76f507d-7d1c-495b-9504-47830b3834f1",
-      "name": "phone",
-      "description": "OpenID Connect built-in scope: phone",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "true",
-        "display.on.consent.screen": "true",
-        "consent.screen.text": "${phoneScopeConsentText}"
-      },
-      "protocolMappers": [
-        {
-          "id": "be849ec8-1747-4efb-bc00-beeaf44f11c8",
-          "name": "phone number verified",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "phoneNumberVerified",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "phone_number_verified",
-            "jsonType.label": "boolean"
-          }
-        },
-        {
-          "id": "8e8600ec-4290-435d-b109-9f0547cb4a1d",
-          "name": "phone number",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "phoneNumber",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "phone_number",
-            "jsonType.label": "String"
-          }
-        }
-      ]
-    },
-    {
-      "id": "54b87197-5309-4b2c-8ad9-f561a0fc178a",
-      "name": "role_list",
-      "description": "SAML role list",
-      "protocol": "saml",
-      "attributes": {
-        "consent.screen.text": "${samlRoleListScopeConsentText}",
-        "display.on.consent.screen": "true"
-      },
-      "protocolMappers": [
-        {
-          "id": "5fd831af-19a5-4a9c-b44f-2a806fae011c",
-          "name": "role list",
-          "protocol": "saml",
-          "protocolMapper": "saml-role-list-mapper",
-          "consentRequired": false,
-          "config": {
-            "single": "false",
-            "attribute.nameformat": "Basic",
-            "attribute.name": "Role"
-          }
-        }
-      ]
-    },
-    {
-      "id": "2f85470d-8cb7-4f07-8602-47342d68af86",
-      "name": "web-origins",
-      "description": "OpenID Connect scope for add allowed web origins to the access token",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "false",
-        "display.on.consent.screen": "false",
-        "consent.screen.text": ""
-      },
-      "protocolMappers": [
-        {
-          "id": "c5d2aafc-f72d-4d7b-9d88-cd759f0e045e",
-          "name": "allowed web origins",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-allowed-origins-mapper",
-          "consentRequired": false,
-          "config": {}
-        }
-      ]
-    },
-    {
-      "id": "528face9-229a-4adf-98d8-68b1a22e880d",
-      "name": "microprofile-jwt",
-      "description": "Microprofile - JWT built-in scope",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "true",
-        "display.on.consent.screen": "false"
-      },
-      "protocolMappers": [
-        {
-          "id": "89240a7c-10f3-4e09-9d6b-41955b86c58d",
-          "name": "groups",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-realm-role-mapper",
-          "consentRequired": false,
-          "config": {
-            "multivalued": "true",
-            "userinfo.token.claim": "true",
-            "user.attribute": "foo",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "groups",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "15b6db72-4870-480e-a675-87f87df5f8a5",
-          "name": "upn",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-property-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "username",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "upn",
-            "jsonType.label": "String"
-          }
-        }
-      ]
-    },
-    {
-      "id": "cdd11477-b02b-4886-bc6d-cf4b728ebc0e",
-      "name": "email",
-      "description": "OpenID Connect built-in scope: email",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "true",
-        "display.on.consent.screen": "true",
-        "consent.screen.text": "${emailScopeConsentText}"
-      },
-      "protocolMappers": [
-        {
-          "id": "627b9f4f-23d6-4480-adf4-264faf58de33",
-          "name": "email verified",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-property-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "emailVerified",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "email_verified",
-            "jsonType.label": "boolean"
-          }
-        },
-        {
-          "id": "6a2adf2e-db2d-4ebe-8d48-f658f9b4a5ca",
-          "name": "email",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-property-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "email",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "email",
-            "jsonType.label": "String"
-          }
-        }
-      ]
-    },
-    {
-      "id": "8f830142-b3f1-40f0-82e2-ceed68857a40",
-      "name": "roles",
-      "description": "OpenID Connect scope for add user roles to the access token",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "false",
-        "display.on.consent.screen": "true",
-        "consent.screen.text": "${rolesScopeConsentText}"
-      },
-      "protocolMappers": [
-        {
-          "id": "28a96dc6-c4dc-4aae-b316-28b56dccd077",
-          "name": "audience resolve",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-audience-resolve-mapper",
-          "consentRequired": false,
-          "config": {}
-        },
-        {
-          "id": "3e81050f-540e-4f3d-9abf-86406e484f76",
-          "name": "realm roles",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-realm-role-mapper",
-          "consentRequired": false,
-          "config": {
-            "user.attribute": "foo",
-            "access.token.claim": "true",
-            "claim.name": "realm_access.roles",
-            "jsonType.label": "String",
-            "multivalued": "true"
-          }
-        },
-        {
-          "id": "13afa1f4-3fac-4c90-a9b4-e84e682f46e9",
-          "name": "client roles",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-client-role-mapper",
-          "consentRequired": false,
-          "config": {
-            "user.attribute": "foo",
-            "access.token.claim": "true",
-            "claim.name": "resource_access.${client_id}.roles",
-            "jsonType.label": "String",
-            "multivalued": "true"
-          }
-        }
-      ]
-    },
-    {
-      "id": "3beac2fc-e947-408f-8422-ca9a1e66a258",
-      "name": "address",
-      "description": "OpenID Connect built-in scope: address",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "true",
-        "display.on.consent.screen": "true",
-        "consent.screen.text": "${addressScopeConsentText}"
-      },
-      "protocolMappers": [
-        {
-          "id": "12911891-db5c-4a35-80fa-555c5eda7e68",
-          "name": "address",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-address-mapper",
-          "consentRequired": false,
-          "config": {
-            "user.attribute.formatted": "formatted",
-            "user.attribute.country": "country",
-            "user.attribute.postal_code": "postal_code",
-            "userinfo.token.claim": "true",
-            "user.attribute.street": "street",
-            "id.token.claim": "true",
-            "user.attribute.region": "region",
-            "access.token.claim": "true",
-            "user.attribute.locality": "locality"
-          }
-        }
-      ]
-    },
-    {
-      "id": "8a29297a-e6f6-41ae-b25d-8a14236de535",
-      "name": "offline_access",
-      "description": "OpenID Connect built-in scope: offline_access",
-      "protocol": "openid-connect",
-      "attributes": {
-        "consent.screen.text": "${offlineAccessScopeConsentText}",
-        "display.on.consent.screen": "true"
-      }
-    },
-    {
-      "id": "ce1622c5-701f-4e3e-9d2d-8dae0f07a295",
-      "name": "profile",
-      "description": "OpenID Connect built-in scope: profile",
-      "protocol": "openid-connect",
-      "attributes": {
-        "include.in.token.scope": "true",
-        "display.on.consent.screen": "true",
-        "consent.screen.text": "${profileScopeConsentText}"
-      },
-      "protocolMappers": [
-        {
-          "id": "98cc62b8-250a-4087-92da-bb0f0931e675",
-          "name": "full name",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-full-name-mapper",
-          "consentRequired": false,
-          "config": {
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "userinfo.token.claim": "true"
-          }
-        },
-        {
-          "id": "b99c8c44-4cc9-4c87-a5a1-c14e64d472ae",
-          "name": "given name",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-property-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "firstName",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "given_name",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "903d5932-bdec-42bc-a53c-3cce93deaa1c",
-          "name": "zoneinfo",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "zoneinfo",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "zoneinfo",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "ccbdc095-28f7-4769-8261-2e32c7b6fab0",
-          "name": "picture",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "picture",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "picture",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "22a4a38c-f755-44f3-b847-803c7fb3cef5",
-          "name": "birthdate",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "birthdate",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "birthdate",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "78726920-b4e2-4ed2-b9e0-df38a7f82376",
-          "name": "updated at",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "updatedAt",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "updated_at",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "c64c6eb8-5cbe-4092-bf2c-dd02b8c0e0e8",
-          "name": "family name",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-property-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "lastName",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "family_name",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "306784d8-8da1-48d8-92a3-dccfff83bcaf",
-          "name": "middle name",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "middleName",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "middle_name",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "0ff127fa-774e-43a8-a1fc-47ea3f307aa1",
-          "name": "website",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "website",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "website",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "8989c6f8-25c5-4d02-aa06-25b3b77fc227",
-          "name": "profile",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "profile",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "profile",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "3b67000c-9cbf-43ee-9e05-26f560871897",
-          "name": "gender",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "gender",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "gender",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "c28b04de-2770-423e-9b9a-b3321d7300e2",
-          "name": "nickname",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "nickname",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "nickname",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "fd791ed4-d4ab-4df9-81b4-c69a3134bcab",
-          "name": "username",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-property-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "username",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "preferred_username",
-            "jsonType.label": "String"
-          }
-        },
-        {
-          "id": "c7378ce5-3673-47b2-9ebc-92c772bebf9f",
-          "name": "locale",
-          "protocol": "openid-connect",
-          "protocolMapper": "oidc-usermodel-attribute-mapper",
-          "consentRequired": false,
-          "config": {
-            "userinfo.token.claim": "true",
-            "user.attribute": "locale",
-            "id.token.claim": "true",
-            "access.token.claim": "true",
-            "claim.name": "locale",
-            "jsonType.label": "String"
-          }
-        }
-      ]
-    }
-  ],
-  "defaultDefaultClientScopes": [
-    "web-origins",
-    "role_list",
-    "roles",
-    "email",
-    "profile"
-  ],
-  "defaultOptionalClientScopes": [
-    "address",
-    "microprofile-jwt",
-    "offline_access",
-    "phone"
-  ],
-  "browserSecurityHeaders": {
-    "contentSecurityPolicyReportOnly": "",
-    "xContentTypeOptions": "nosniff",
-    "xRobotsTag": "none",
-    "xFrameOptions": "SAMEORIGIN",
-    "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';",
-    "xXSSProtection": "1; mode=block",
-    "strictTransportSecurity": "max-age=31536000; includeSubDomains"
-  },
-  "smtpServer": {},
-  "eventsEnabled": false,
-  "eventsListeners": [
-    "jboss-logging"
-  ],
-  "enabledEventTypes": [],
-  "adminEventsEnabled": false,
-  "adminEventsDetailsEnabled": false,
-  "identityProviders": [],
-  "identityProviderMappers": [],
-  "components": {
-    "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [
-      {
-        "id": "8e2d0c22-0627-4115-9f14-4225244333d9",
-        "name": "Trusted Hosts",
-        "providerId": "trusted-hosts",
-        "subType": "anonymous",
-        "subComponents": {},
-        "config": {
-          "host-sending-registration-request-must-match": [
-            "true"
-          ],
-          "client-uris-must-match": [
-            "true"
-          ]
-        }
-      },
-      {
-        "id": "45bdde87-a364-4d66-a12e-1a4fd42c85fb",
-        "name": "Full Scope Disabled",
-        "providerId": "scope",
-        "subType": "anonymous",
-        "subComponents": {},
-        "config": {}
-      },
-      {
-        "id": "7b7d3215-68d2-41db-bc0f-db0a45934a84",
-        "name": "Allowed Client Scopes",
-        "providerId": "allowed-client-templates",
-        "subType": "anonymous",
-        "subComponents": {},
-        "config": {
-          "allow-default-scopes": [
-            "true"
-          ]
-        }
-      },
-      {
-        "id": "e067781a-6058-4f2b-9408-3390e9854cf8",
-        "name": "Consent Required",
-        "providerId": "consent-required",
-        "subType": "anonymous",
-        "subComponents": {},
-        "config": {}
-      },
-      {
-        "id": "296be954-8084-45c8-b6f3-94d53f7341f6",
-        "name": "Allowed Protocol Mapper Types",
-        "providerId": "allowed-protocol-mappers",
-        "subType": "anonymous",
-        "subComponents": {},
-        "config": {
-          "allowed-protocol-mapper-types": [
-            "saml-role-list-mapper",
-            "saml-user-property-mapper",
-            "oidc-usermodel-attribute-mapper",
-            "oidc-address-mapper",
-            "oidc-sha256-pairwise-sub-mapper",
-            "saml-user-attribute-mapper",
-            "oidc-usermodel-property-mapper",
-            "oidc-full-name-mapper"
-          ]
-        }
-      },
-      {
-        "id": "b9a2a484-aee1-4633-aa37-a9ab2b74a239",
-        "name": "Allowed Client Scopes",
-        "providerId": "allowed-client-templates",
-        "subType": "authenticated",
-        "subComponents": {},
-        "config": {
-          "allow-default-scopes": [
-            "true"
-          ]
-        }
-      },
-      {
-        "id": "016e4914-a32c-40fa-8aab-3eb25a411df5",
-        "name": "Max Clients Limit",
-        "providerId": "max-clients",
-        "subType": "anonymous",
-        "subComponents": {},
-        "config": {
-          "max-clients": [
-            "200"
-          ]
-        }
-      },
-      {
-        "id": "a4fb2fa3-93b8-4497-8047-424f70f298c7",
-        "name": "Allowed Protocol Mapper Types",
-        "providerId": "allowed-protocol-mappers",
-        "subType": "authenticated",
-        "subComponents": {},
-        "config": {
-          "allowed-protocol-mapper-types": [
-            "oidc-sha256-pairwise-sub-mapper",
-            "oidc-full-name-mapper",
-            "saml-user-property-mapper",
-            "saml-role-list-mapper",
-            "oidc-usermodel-attribute-mapper",
-            "oidc-address-mapper",
-            "oidc-usermodel-property-mapper",
-            "saml-user-attribute-mapper"
-          ]
-        }
-      }
-    ],
-    "org.keycloak.keys.KeyProvider": [
-      {
-        "id": "31b693fa-2b95-47a6-96a1-dfff868ca1df",
-        "name": "rsa-enc-generated",
-        "providerId": "rsa-enc-generated",
-        "subComponents": {},
-        "config": {
-          "priority": [
-            "100"
-          ],
-          "algorithm": [
-            "RSA-OAEP"
-          ]
-        }
-      },
-      {
-        "id": "f1e63d09-45a0-4382-8346-0408ee906649",
-        "name": "hmac-generated",
-        "providerId": "hmac-generated",
-        "subComponents": {},
-        "config": {
-          "priority": [
-            "100"
-          ],
-          "algorithm": [
-            "HS256"
-          ]
-        }
-      },
-      {
-        "id": "99084d92-06f5-4787-b932-a40b5377f3cb",
-        "name": "rsa-generated",
-        "providerId": "rsa-generated",
-        "subComponents": {},
-        "config": {
-          "priority": [
-            "100"
-          ]
-        }
-      },
-      {
-        "id": "9887f1bf-b4f7-4646-9919-a9dbde13ce74",
-        "name": "aes-generated",
-        "providerId": "aes-generated",
-        "subComponents": {},
-        "config": {
-          "priority": [
-            "100"
-          ]
-        }
-      }
-    ]
-  },
-  "internationalizationEnabled": false,
-  "supportedLocales": [],
-  "authenticationFlows": [
-    {
-      "id": "a7f91199-178d-4399-8319-5063ffcc37b0",
-      "alias": "Account verification options",
-      "description": "Method with which to verity the existing account",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "idp-email-verification",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "ALTERNATIVE",
-          "priority": 20,
-          "flowAlias": "Verify Existing Account by Re-authentication",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "602533e3-f7a1-4e25-9a12-f3080eeccec3",
-      "alias": "Authentication Options",
-      "description": "Authentication options.",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "basic-auth",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "basic-auth-otp",
-          "authenticatorFlow": false,
-          "requirement": "DISABLED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "auth-spnego",
-          "authenticatorFlow": false,
-          "requirement": "DISABLED",
-          "priority": 30,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "ba7bcdfd-05c6-4da6-827b-24e3513bddbe",
-      "alias": "Browser - Conditional OTP",
-      "description": "Flow to determine if the OTP is required for the authentication",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "conditional-user-configured",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "auth-otp-form",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "d0f62327-ef2f-4561-8b5a-1f61faecdac0",
-      "alias": "Direct Grant - Conditional OTP",
-      "description": "Flow to determine if the OTP is required for the authentication",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "conditional-user-configured",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "direct-grant-validate-otp",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "f10b85d0-26ee-4648-b81b-80213b066d76",
-      "alias": "First broker login - Conditional OTP",
-      "description": "Flow to determine if the OTP is required for the authentication",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "conditional-user-configured",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "auth-otp-form",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "d6af4ac0-f6bc-4197-bf01-6e2c321ecaad",
-      "alias": "Handle Existing Account",
-      "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "idp-confirm-link",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "flowAlias": "Account verification options",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "501ab743-2e2f-427d-820f-14deed111b08",
-      "alias": "Reset - Conditional OTP",
-      "description": "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "conditional-user-configured",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "reset-otp",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "e02c3a63-a09d-4dde-9f6c-22c95eef8534",
-      "alias": "User creation or linking",
-      "description": "Flow for the existing/non-existing user alternatives",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticatorConfig": "create unique user config",
-          "authenticator": "idp-create-user-if-unique",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "ALTERNATIVE",
-          "priority": 20,
-          "flowAlias": "Handle Existing Account",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "c348906d-6266-4e68-937e-8f3d15c66524",
-      "alias": "Verify Existing Account by Re-authentication",
-      "description": "Reauthentication of existing account",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "idp-username-password-form",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "CONDITIONAL",
-          "priority": 20,
-          "flowAlias": "First broker login - Conditional OTP",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "cf6ba166-43d5-4687-95c4-0a184ca08885",
-      "alias": "browser",
-      "description": "browser based authentication",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "auth-cookie",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "auth-spnego",
-          "authenticatorFlow": false,
-          "requirement": "DISABLED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "identity-provider-redirector",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 25,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "ALTERNATIVE",
-          "priority": 30,
-          "flowAlias": "forms",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "87cb4f25-9275-4617-9e95-63adf1ce3ece",
-      "alias": "clients",
-      "description": "Base authentication for clients",
-      "providerId": "client-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "client-secret",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "client-jwt",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "client-secret-jwt",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 30,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "client-x509",
-          "authenticatorFlow": false,
-          "requirement": "ALTERNATIVE",
-          "priority": 40,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "e75b99c5-c566-4009-b0ba-c73716bed254",
-      "alias": "direct grant",
-      "description": "OpenID Connect Resource Owner Grant",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "direct-grant-validate-username",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "direct-grant-validate-password",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "CONDITIONAL",
-          "priority": 30,
-          "flowAlias": "Direct Grant - Conditional OTP",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "8a97380c-0f70-45cb-a7b0-780eb70453ba",
-      "alias": "docker auth",
-      "description": "Used by Docker clients to authenticate against the IDP",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "docker-http-basic-authenticator",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "131e0aad-5422-4504-bafc-96be2fa44c34",
-      "alias": "first broker login",
-      "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticatorConfig": "review profile config",
-          "authenticator": "idp-review-profile",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "flowAlias": "User creation or linking",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "e7d4b793-b3c2-4ec3-a2b1-04f7217e8f46",
-      "alias": "forms",
-      "description": "Username, password, otp and other auth forms.",
-      "providerId": "basic-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "auth-username-password-form",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "CONDITIONAL",
-          "priority": 20,
-          "flowAlias": "Browser - Conditional OTP",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "f59a7688-61a1-4ac9-a13a-03f92e022add",
-      "alias": "http challenge",
-      "description": "An authentication flow based on challenge-response HTTP Authentication Schemes",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "no-cookie-redirect",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "flowAlias": "Authentication Options",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "80a7b0f5-abb3-4780-be58-4ed1dc3e50fa",
-      "alias": "registration",
-      "description": "registration flow",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "registration-page-form",
-          "authenticatorFlow": true,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "flowAlias": "registration form",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "f18231cf-b803-493b-9dd6-ee8fa602c861",
-      "alias": "registration form",
-      "description": "registration form",
-      "providerId": "form-flow",
-      "topLevel": false,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "registration-user-creation",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "registration-profile-action",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 40,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "registration-password-action",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 50,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "registration-recaptcha-action",
-          "authenticatorFlow": false,
-          "requirement": "DISABLED",
-          "priority": 60,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    },
-    {
-      "id": "34ccfce6-1488-4db3-b90e-d98e8d8b2ae6",
-      "alias": "reset credentials",
-      "description": "Reset credentials for a user if they forgot their password or something",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "reset-credentials-choose-user",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "reset-credential-email",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 20,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticator": "reset-password",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 30,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        },
-        {
-          "authenticatorFlow": true,
-          "requirement": "CONDITIONAL",
-          "priority": 40,
-          "flowAlias": "Reset - Conditional OTP",
-          "userSetupAllowed": false,
-          "autheticatorFlow": true
-        }
-      ]
-    },
-    {
-      "id": "4468100c-fa83-4c16-8970-d53cb592f93a",
-      "alias": "saml ecp",
-      "description": "SAML ECP Profile Authentication Flow",
-      "providerId": "basic-flow",
-      "topLevel": true,
-      "builtIn": true,
-      "authenticationExecutions": [
-        {
-          "authenticator": "http-basic-authenticator",
-          "authenticatorFlow": false,
-          "requirement": "REQUIRED",
-          "priority": 10,
-          "userSetupAllowed": false,
-          "autheticatorFlow": false
-        }
-      ]
-    }
-  ],
-  "authenticatorConfig": [
-    {
-      "id": "c3bb087e-7fe9-4f13-b1bd-c2d7d1320054",
-      "alias": "create unique user config",
-      "config": {
-        "require.password.update.after.registration": "false"
-      }
-    },
-    {
-      "id": "09820d9d-3c12-45f3-bc62-97b53f8a7efe",
-      "alias": "review profile config",
-      "config": {
-        "update.profile.on.first.login": "missing"
-      }
-    }
-  ],
-  "requiredActions": [
-    {
-      "alias": "CONFIGURE_TOTP",
-      "name": "Configure OTP",
-      "providerId": "CONFIGURE_TOTP",
-      "enabled": true,
-      "defaultAction": false,
-      "priority": 10,
-      "config": {}
-    },
-    {
-      "alias": "terms_and_conditions",
-      "name": "Terms and Conditions",
-      "providerId": "terms_and_conditions",
-      "enabled": false,
-      "defaultAction": false,
-      "priority": 20,
-      "config": {}
-    },
-    {
-      "alias": "UPDATE_PASSWORD",
-      "name": "Update Password",
-      "providerId": "UPDATE_PASSWORD",
-      "enabled": true,
-      "defaultAction": false,
-      "priority": 30,
-      "config": {}
-    },
-    {
-      "alias": "UPDATE_PROFILE",
-      "name": "Update Profile",
-      "providerId": "UPDATE_PROFILE",
-      "enabled": true,
-      "defaultAction": false,
-      "priority": 40,
-      "config": {}
-    },
-    {
-      "alias": "VERIFY_EMAIL",
-      "name": "Verify Email",
-      "providerId": "VERIFY_EMAIL",
-      "enabled": true,
-      "defaultAction": false,
-      "priority": 50,
-      "config": {}
-    },
-    {
-      "alias": "delete_account",
-      "name": "Delete Account",
-      "providerId": "delete_account",
-      "enabled": false,
-      "defaultAction": false,
-      "priority": 60,
-      "config": {}
-    },
-    {
-      "alias": "update_user_locale",
-      "name": "Update User Locale",
-      "providerId": "update_user_locale",
-      "enabled": true,
-      "defaultAction": false,
-      "priority": 1000,
-      "config": {}
-    }
-  ],
-  "browserFlow": "browser",
-  "registrationFlow": "registration",
-  "directGrantFlow": "direct grant",
-  "resetCredentialsFlow": "reset credentials",
-  "clientAuthenticationFlow": "clients",
-  "dockerAuthenticationFlow": "docker auth",
-  "attributes": {
-    "cibaBackchannelTokenDeliveryMode": "poll",
-    "cibaExpiresIn": "120",
-    "cibaAuthRequestedUserHint": "login_hint",
-    "oauth2DeviceCodeLifespan": "600",
-    "clientOfflineSessionMaxLifespan": "0",
-    "oauth2DevicePollingInterval": "5",
-    "clientSessionIdleTimeout": "0",
-    "parRequestUriLifespan": "60",
-    "clientSessionMaxLifespan": "0",
-    "clientOfflineSessionIdleTimeout": "0",
-    "cibaInterval": "5"
-  },
-  "keycloakVersion": "16.1.1",
-  "userManagedAccessAllowed": false,
-  "clientProfiles": {
-    "profiles": []
-  },
-  "clientPolicies": {
-    "policies": []
-  }
-}
diff --git a/conf/keycloak/run-keycloak.sh b/conf/keycloak/run-keycloak.sh
index effb37f91b8..ddc5108bee4 100755
--- a/conf/keycloak/run-keycloak.sh
+++ b/conf/keycloak/run-keycloak.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-DOCKER_IMAGE="jboss/keycloak:16.1.1"
+DOCKER_IMAGE="quay.io/keycloak/keycloak:21.0"
 KEYCLOAK_USER="kcadmin"
 KEYCLOAK_PASSWORD="kcpassword"
 KEYCLOAK_PORT=8090
@@ -11,7 +11,7 @@ if [ ! "$(docker ps -q -f name=^/keycloak$)" ]; then
     docker start keycloak
     echo "INFO - Keycloak container restarted"
   else
-    docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/oidc-realm.json -v "$(pwd)"/oidc-realm.json:/tmp/oidc-realm.json $DOCKER_IMAGE
+    docker run -d --name keycloak -p $KEYCLOAK_PORT:8080 -e KEYCLOAK_USER=$KEYCLOAK_USER -e KEYCLOAK_PASSWORD=$KEYCLOAK_PASSWORD -e KEYCLOAK_IMPORT=/tmp/test-realm.json -v "$(pwd)"/test-realm.json:/tmp/test-realm.json $DOCKER_IMAGE
     echo "INFO - Keycloak container created and running"
   fi
 else
diff --git a/conf/keycloak/test-realm.json b/conf/keycloak/test-realm.json
new file mode 100644
index 00000000000..efe71cc5d29
--- /dev/null
+++ b/conf/keycloak/test-realm.json
@@ -0,0 +1,1939 @@
+{
+  "id" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+  "realm" : "test",
+  "displayName" : "",
+  "displayNameHtml" : "",
+  "notBefore" : 0,
+  "defaultSignatureAlgorithm" : "RS256",
+  "revokeRefreshToken" : false,
+  "refreshTokenMaxReuse" : 0,
+  "accessTokenLifespan" : 300,
+  "accessTokenLifespanForImplicitFlow" : 900,
+  "ssoSessionIdleTimeout" : 1800,
+  "ssoSessionMaxLifespan" : 36000,
+  "ssoSessionIdleTimeoutRememberMe" : 0,
+  "ssoSessionMaxLifespanRememberMe" : 0,
+  "offlineSessionIdleTimeout" : 2592000,
+  "offlineSessionMaxLifespanEnabled" : false,
+  "offlineSessionMaxLifespan" : 5184000,
+  "clientSessionIdleTimeout" : 0,
+  "clientSessionMaxLifespan" : 0,
+  "clientOfflineSessionIdleTimeout" : 0,
+  "clientOfflineSessionMaxLifespan" : 0,
+  "accessCodeLifespan" : 60,
+  "accessCodeLifespanUserAction" : 300,
+  "accessCodeLifespanLogin" : 1800,
+  "actionTokenGeneratedByAdminLifespan" : 43200,
+  "actionTokenGeneratedByUserLifespan" : 300,
+  "oauth2DeviceCodeLifespan" : 600,
+  "oauth2DevicePollingInterval" : 5,
+  "enabled" : true,
+  "sslRequired" : "none",
+  "registrationAllowed" : false,
+  "registrationEmailAsUsername" : false,
+  "rememberMe" : false,
+  "verifyEmail" : false,
+  "loginWithEmailAllowed" : true,
+  "duplicateEmailsAllowed" : false,
+  "resetPasswordAllowed" : false,
+  "editUsernameAllowed" : false,
+  "bruteForceProtected" : false,
+  "permanentLockout" : false,
+  "maxFailureWaitSeconds" : 900,
+  "minimumQuickLoginWaitSeconds" : 60,
+  "waitIncrementSeconds" : 60,
+  "quickLoginCheckMilliSeconds" : 1000,
+  "maxDeltaTimeSeconds" : 43200,
+  "failureFactor" : 30,
+  "roles" : {
+    "realm" : [ {
+      "id" : "075daee1-5ab2-44b5-adbf-fa49a3da8305",
+      "name" : "uma_authorization",
+      "description" : "${role_uma_authorization}",
+      "composite" : false,
+      "clientRole" : false,
+      "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+      "attributes" : { }
+    }, {
+      "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71",
+      "name" : "default-roles-test",
+      "description" : "${role_default-roles}",
+      "composite" : true,
+      "composites" : {
+        "realm" : [ "offline_access", "uma_authorization" ],
+        "client" : {
+          "account" : [ "view-profile", "manage-account" ]
+        }
+      },
+      "clientRole" : false,
+      "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+      "attributes" : { }
+    }, {
+      "id" : "e6d31555-6be6-4dee-bc6a-40a53108e4c2",
+      "name" : "offline_access",
+      "description" : "${role_offline-access}",
+      "composite" : false,
+      "clientRole" : false,
+      "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983",
+      "attributes" : { }
+    } ],
+    "client" : {
+      "realm-management" : [ {
+        "id" : "1955bd12-5f86-4a74-b130-d68a8ef6f0ee",
+        "name" : "impersonation",
+        "description" : "${role_impersonation}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "1109c350-9ab1-426c-9876-ef67d4310f35",
+        "name" : "view-authorization",
+        "description" : "${role_view-authorization}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "980c3fd3-1ae3-4b8f-9a00-d764c939035f",
+        "name" : "query-users",
+        "description" : "${role_query-users}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "5363e601-0f9d-4633-a8c8-28cb0f859b7b",
+        "name" : "query-groups",
+        "description" : "${role_query-groups}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "59aa7992-ad78-48db-868a-25d6e1d7db50",
+        "name" : "realm-admin",
+        "description" : "${role_realm-admin}",
+        "composite" : true,
+        "composites" : {
+          "client" : {
+            "realm-management" : [ "impersonation", "view-authorization", "query-users", "query-groups", "manage-clients", "manage-realm", "view-identity-providers", "query-realms", "manage-authorization", "manage-identity-providers", "manage-users", "view-users", "view-realm", "create-client", "view-clients", "manage-events", "query-clients", "view-events" ]
+          }
+        },
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "112f53c2-897d-4c01-81db-b8dc10c5b995",
+        "name" : "manage-clients",
+        "description" : "${role_manage-clients}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "c7f57bbd-ef32-4a64-9888-7b8abd90777a",
+        "name" : "manage-realm",
+        "description" : "${role_manage-realm}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "8885dac8-0af3-45af-94ce-eff5e801bb80",
+        "name" : "view-identity-providers",
+        "description" : "${role_view-identity-providers}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "2673346c-b0ef-4e01-8a90-be03866093af",
+        "name" : "manage-authorization",
+        "description" : "${role_manage-authorization}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "b7182885-9e57-445f-8dae-17c16eb31b5d",
+        "name" : "manage-identity-providers",
+        "description" : "${role_manage-identity-providers}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "ba7bfe0c-cb07-4a47-b92c-b8132b57e181",
+        "name" : "manage-users",
+        "description" : "${role_manage-users}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "13a8f0fc-647d-4bfe-b525-73956898e550",
+        "name" : "query-realms",
+        "description" : "${role_query-realms}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "ef4c57dc-78c2-4f9a-8d2b-0e97d46fc842",
+        "name" : "view-realm",
+        "description" : "${role_view-realm}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "2875da34-006c-4b7f-bfc8-9ae8e46af3a2",
+        "name" : "view-users",
+        "description" : "${role_view-users}",
+        "composite" : true,
+        "composites" : {
+          "client" : {
+            "realm-management" : [ "query-users", "query-groups" ]
+          }
+        },
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "c8c8f7dc-876b-4263-806f-3329f7cd5fd3",
+        "name" : "create-client",
+        "description" : "${role_create-client}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "21b84f90-5a9a-4845-a7ba-bbd98ac0fcc4",
+        "name" : "view-clients",
+        "description" : "${role_view-clients}",
+        "composite" : true,
+        "composites" : {
+          "client" : {
+            "realm-management" : [ "query-clients" ]
+          }
+        },
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "6fd64c94-d663-4501-ad77-0dcf8887d434",
+        "name" : "manage-events",
+        "description" : "${role_manage-events}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "b321927a-023c-4d2a-99ad-24baf7ff6d83",
+        "name" : "query-clients",
+        "description" : "${role_query-clients}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      }, {
+        "id" : "2fc21160-78de-457b-8594-e5c76cde1d5e",
+        "name" : "view-events",
+        "description" : "${role_view-events}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+        "attributes" : { }
+      } ],
+      "test" : [ ],
+      "security-admin-console" : [ ],
+      "admin-cli" : [ ],
+      "account-console" : [ ],
+      "broker" : [ {
+        "id" : "07ee59b5-dca6-48fb-83d4-2994ef02850e",
+        "name" : "read-token",
+        "description" : "${role_read-token}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "b57d62bb-77ff-42bd-b8ff-381c7288f327",
+        "attributes" : { }
+      } ],
+      "account" : [ {
+        "id" : "17d2f811-7bdf-4c73-83b4-1037001797b8",
+        "name" : "view-applications",
+        "description" : "${role_view-applications}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      }, {
+        "id" : "d1ff44f9-419e-42fd-98e8-1add1169a972",
+        "name" : "delete-account",
+        "description" : "${role_delete-account}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      }, {
+        "id" : "14c23a18-ae2d-43c9-b0c0-aaf6e0c7f5b0",
+        "name" : "manage-account-links",
+        "description" : "${role_manage-account-links}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      }, {
+        "id" : "6fbe58af-d2fe-4d66-95fe-a2e8a818cb55",
+        "name" : "view-profile",
+        "description" : "${role_view-profile}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      }, {
+        "id" : "bdfd02bc-6f6a-47d2-82bc-0ca52d78ff48",
+        "name" : "manage-consent",
+        "description" : "${role_manage-consent}",
+        "composite" : true,
+        "composites" : {
+          "client" : {
+            "account" : [ "view-consent" ]
+          }
+        },
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      }, {
+        "id" : "782f3b0c-a17b-4a87-988b-1a711401f3b0",
+        "name" : "manage-account",
+        "description" : "${role_manage-account}",
+        "composite" : true,
+        "composites" : {
+          "client" : {
+            "account" : [ "manage-account-links" ]
+          }
+        },
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      }, {
+        "id" : "8a3bfe15-66d9-4f3d-83ac-801d682d42b0",
+        "name" : "view-consent",
+        "description" : "${role_view-consent}",
+        "composite" : false,
+        "clientRole" : true,
+        "containerId" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+        "attributes" : { }
+      } ]
+    }
+  },
+  "groups" : [ {
+    "id" : "d46f94c2-3b47-4288-b937-9cf918e54f0a",
+    "name" : "admins",
+    "path" : "/admins",
+    "attributes" : { },
+    "realmRoles" : [ ],
+    "clientRoles" : { },
+    "subGroups" : [ ]
+  }, {
+    "id" : "e992ce15-baac-48a0-8834-06f6fcf6c05b",
+    "name" : "curators",
+    "path" : "/curators",
+    "attributes" : { },
+    "realmRoles" : [ ],
+    "clientRoles" : { },
+    "subGroups" : [ ]
+  }, {
+    "id" : "531cf81d-a700-4336-808f-37a49709b48c",
+    "name" : "members",
+    "path" : "/members",
+    "attributes" : { },
+    "realmRoles" : [ ],
+    "clientRoles" : { },
+    "subGroups" : [ ]
+  } ],
+  "defaultRole" : {
+    "id" : "b4ff9091-ddf9-4536-b175-8cfa3e331d71",
+    "name" : "default-roles-test",
+    "description" : "${role_default-roles}",
+    "composite" : true,
+    "clientRole" : false,
+    "containerId" : "80a7e04b-a2b5-4891-a2d1-5ad4e915f983"
+  },
+  "requiredCredentials" : [ "password" ],
+  "otpPolicyType" : "totp",
+  "otpPolicyAlgorithm" : "HmacSHA1",
+  "otpPolicyInitialCounter" : 0,
+  "otpPolicyDigits" : 6,
+  "otpPolicyLookAheadWindow" : 1,
+  "otpPolicyPeriod" : 30,
+  "otpSupportedApplications" : [ "FreeOTP", "Google Authenticator" ],
+  "webAuthnPolicyRpEntityName" : "keycloak",
+  "webAuthnPolicySignatureAlgorithms" : [ "ES256" ],
+  "webAuthnPolicyRpId" : "",
+  "webAuthnPolicyAttestationConveyancePreference" : "not specified",
+  "webAuthnPolicyAuthenticatorAttachment" : "not specified",
+  "webAuthnPolicyRequireResidentKey" : "not specified",
+  "webAuthnPolicyUserVerificationRequirement" : "not specified",
+  "webAuthnPolicyCreateTimeout" : 0,
+  "webAuthnPolicyAvoidSameAuthenticatorRegister" : false,
+  "webAuthnPolicyAcceptableAaguids" : [ ],
+  "webAuthnPolicyPasswordlessRpEntityName" : "keycloak",
+  "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ],
+  "webAuthnPolicyPasswordlessRpId" : "",
+  "webAuthnPolicyPasswordlessAttestationConveyancePreference" : "not specified",
+  "webAuthnPolicyPasswordlessAuthenticatorAttachment" : "not specified",
+  "webAuthnPolicyPasswordlessRequireResidentKey" : "not specified",
+  "webAuthnPolicyPasswordlessUserVerificationRequirement" : "not specified",
+  "webAuthnPolicyPasswordlessCreateTimeout" : 0,
+  "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false,
+  "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ],
+  "users" : [ {
+    "id" : "52cddd46-251c-4534-acc8-0580eeafb577",
+    "createdTimestamp" : 1684736014759,
+    "username" : "admin",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : true,
+    "firstName" : "Dataverse",
+    "lastName" : "Admin",
+    "email" : "dataverse-admin@mailinator.com",
+    "credentials" : [ {
+      "id" : "28f1ece7-26fb-40f1-9174-5ffce7b85c0a",
+      "type" : "password",
+      "userLabel" : "Set to \"admin\"",
+      "createdDate" : 1684736057302,
+      "secretData" : "{\"value\":\"ONI7fl6BmooVTUgwN1W3m7hsRjMAYEr2l+Fp5+7IOYw1iIntwvZ3U3W0ZBcCFJ7uhcKqF101+rueM3dZfoshPQ==\",\"salt\":\"Hj7co7zYVei7xwx8EaYP3A==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-test" ],
+    "notBefore" : 0,
+    "groups" : [ "/admins" ]
+  }, {
+    "id" : "a3d8e76d-7e7b-42dc-bbd7-4258818a8a1b",
+    "createdTimestamp" : 1684755806552,
+    "username" : "affiliate",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : true,
+    "firstName" : "Dataverse",
+    "lastName" : "Affiliate",
+    "email" : "dataverse-affiliate@mailinator.com",
+    "credentials" : [ {
+      "id" : "31c8eb1e-b2a8-4f86-833b-7c0536cd61a1",
+      "type" : "password",
+      "userLabel" : "My password",
+      "createdDate" : 1684755821743,
+      "secretData" : "{\"value\":\"T+RQ4nvmjknj7ds8NU7782j6PJ++uCu98zNoDQjIe9IKXah+13q4EcXO9IHmi2BJ7lgT0OIzwIoac4JEQLxhjQ==\",\"salt\":\"fnRmE9WmjAp4tlvGh/bxxQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-test" ],
+    "notBefore" : 0,
+    "groups" : [ ]
+  }, {
+    "id" : "e5531496-cfb8-498c-a902-50c98d649e79",
+    "createdTimestamp" : 1684755721064,
+    "username" : "curator",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : true,
+    "firstName" : "Dataverse",
+    "lastName" : "Curator",
+    "email" : "dataverse-curator@mailinator.com",
+    "credentials" : [ {
+      "id" : "664546b4-b936-45cf-a4cf-5e98b743fc7f",
+      "type" : "password",
+      "userLabel" : "My password",
+      "createdDate" : 1684755740776,
+      "secretData" : "{\"value\":\"AvVqybCNtCBVAdLEeJKresy9tc3c4BBUQvu5uHVQw4IjVagN6FpKGlDEKOrxhzdSM8skEvthOEqJkloPo1w+NQ==\",\"salt\":\"2em2DDRRlNEYsNR3xDqehw==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-test" ],
+    "notBefore" : 0,
+    "groups" : [ "/curators" ]
+  }, {
+    "id" : "c0082e7e-a3e9-45e6-95e9-811a34adce9d",
+    "createdTimestamp" : 1684755585802,
+    "username" : "user",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : true,
+    "firstName" : "Dataverse",
+    "lastName" : "User",
+    "email" : "dataverse-user@mailinator.com",
+    "credentials" : [ {
+      "id" : "00d6d67f-2e30-4da6-a567-bec38a1886a0",
+      "type" : "password",
+      "userLabel" : "My password",
+      "createdDate" : 1684755599597,
+      "secretData" : "{\"value\":\"z991rnjznAgosi5nX962HjM8/gN5GLJTdrlvi6G9cj8470X2/oZUb4Lka6s8xImgtEloCgWiKqH0EH9G4Y3a5A==\",\"salt\":\"/Uz7w+2IqDo+fQUGqxjVHw==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-test" ],
+    "notBefore" : 0,
+    "groups" : [ "/members" ]
+  } ],
+  "scopeMappings" : [ {
+    "clientScope" : "offline_access",
+    "roles" : [ "offline_access" ]
+  } ],
+  "clientScopeMappings" : {
+    "account" : [ {
+      "client" : "account-console",
+      "roles" : [ "manage-account" ]
+    } ]
+  },
+  "clients" : [ {
+    "id" : "77f8127a-261e-4cd8-a77d-b74a389f7fd4",
+    "clientId" : "account",
+    "name" : "${client_account}",
+    "rootUrl" : "${authBaseUrl}",
+    "baseUrl" : "/realms/test/account/",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "redirectUris" : [ "/realms/test/account/*" ],
+    "webOrigins" : [ ],
+    "notBefore" : 0,
+    "bearerOnly" : false,
+    "consentRequired" : false,
+    "standardFlowEnabled" : true,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : false,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : true,
+    "frontchannelLogout" : false,
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "post.logout.redirect.uris" : "+"
+    },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : false,
+    "nodeReRegistrationTimeout" : 0,
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  }, {
+    "id" : "5d99f721-027c-478d-867d-61114e0a8192",
+    "clientId" : "account-console",
+    "name" : "${client_account-console}",
+    "rootUrl" : "${authBaseUrl}",
+    "baseUrl" : "/realms/test/account/",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "redirectUris" : [ "/realms/test/account/*" ],
+    "webOrigins" : [ ],
+    "notBefore" : 0,
+    "bearerOnly" : false,
+    "consentRequired" : false,
+    "standardFlowEnabled" : true,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : false,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : true,
+    "frontchannelLogout" : false,
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "post.logout.redirect.uris" : "+",
+      "pkce.code.challenge.method" : "S256"
+    },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : false,
+    "nodeReRegistrationTimeout" : 0,
+    "protocolMappers" : [ {
+      "id" : "e181a0ce-9a04-4468-a38a-aaef9f78f989",
+      "name" : "audience resolve",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-audience-resolve-mapper",
+      "consentRequired" : false,
+      "config" : { }
+    } ],
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  }, {
+    "id" : "5eccc178-121e-4d0f-bcb2-04ae3c2e52ed",
+    "clientId" : "admin-cli",
+    "name" : "${client_admin-cli}",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "redirectUris" : [ ],
+    "webOrigins" : [ ],
+    "notBefore" : 0,
+    "bearerOnly" : false,
+    "consentRequired" : false,
+    "standardFlowEnabled" : false,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : true,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : true,
+    "frontchannelLogout" : false,
+    "protocol" : "openid-connect",
+    "attributes" : { },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : false,
+    "nodeReRegistrationTimeout" : 0,
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  }, {
+    "id" : "b57d62bb-77ff-42bd-b8ff-381c7288f327",
+    "clientId" : "broker",
+    "name" : "${client_broker}",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "redirectUris" : [ ],
+    "webOrigins" : [ ],
+    "notBefore" : 0,
+    "bearerOnly" : true,
+    "consentRequired" : false,
+    "standardFlowEnabled" : true,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : false,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : false,
+    "frontchannelLogout" : false,
+    "protocol" : "openid-connect",
+    "attributes" : { },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : false,
+    "nodeReRegistrationTimeout" : 0,
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  }, {
+    "id" : "dada0ae8-ee9f-415a-9685-42da7c563660",
+    "clientId" : "realm-management",
+    "name" : "${client_realm-management}",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "redirectUris" : [ ],
+    "webOrigins" : [ ],
+    "notBefore" : 0,
+    "bearerOnly" : true,
+    "consentRequired" : false,
+    "standardFlowEnabled" : true,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : false,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : false,
+    "frontchannelLogout" : false,
+    "protocol" : "openid-connect",
+    "attributes" : { },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : false,
+    "nodeReRegistrationTimeout" : 0,
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  }, {
+    "id" : "bf7cf550-3875-4f97-9878-b2419a854058",
+    "clientId" : "security-admin-console",
+    "name" : "${client_security-admin-console}",
+    "rootUrl" : "${authAdminUrl}",
+    "baseUrl" : "/admin/test/console/",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "redirectUris" : [ "/admin/test/console/*" ],
+    "webOrigins" : [ "+" ],
+    "notBefore" : 0,
+    "bearerOnly" : false,
+    "consentRequired" : false,
+    "standardFlowEnabled" : true,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : false,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : true,
+    "frontchannelLogout" : false,
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "post.logout.redirect.uris" : "+",
+      "pkce.code.challenge.method" : "S256"
+    },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : false,
+    "nodeReRegistrationTimeout" : 0,
+    "protocolMappers" : [ {
+      "id" : "ff845e16-e200-4894-ab51-37d8b9f2a445",
+      "name" : "locale",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "locale",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "locale",
+        "jsonType.label" : "String"
+      }
+    } ],
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  }, {
+    "id" : "9c27faa8-4b8d-4ad9-9cd1-880032ef06aa",
+    "clientId" : "test",
+    "name" : "A Test Client",
+    "description" : "Use for hacking and testing away a confidential client",
+    "rootUrl" : "",
+    "adminUrl" : "",
+    "baseUrl" : "",
+    "surrogateAuthRequired" : false,
+    "enabled" : true,
+    "alwaysDisplayInConsole" : false,
+    "clientAuthenticatorType" : "client-secret",
+    "secret" : "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8",
+    "redirectUris" : [ "*" ],
+    "webOrigins" : [ ],
+    "notBefore" : 0,
+    "bearerOnly" : false,
+    "consentRequired" : false,
+    "standardFlowEnabled" : true,
+    "implicitFlowEnabled" : false,
+    "directAccessGrantsEnabled" : true,
+    "serviceAccountsEnabled" : false,
+    "publicClient" : false,
+    "frontchannelLogout" : true,
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "oidc.ciba.grant.enabled" : "false",
+      "client.secret.creation.time" : "1684735831",
+      "backchannel.logout.session.required" : "true",
+      "display.on.consent.screen" : "false",
+      "oauth2.device.authorization.grant.enabled" : "false",
+      "backchannel.logout.revoke.offline.tokens" : "false"
+    },
+    "authenticationFlowBindingOverrides" : { },
+    "fullScopeAllowed" : true,
+    "nodeReRegistrationTimeout" : -1,
+    "defaultClientScopes" : [ "web-origins", "acr", "roles", "profile", "email" ],
+    "optionalClientScopes" : [ "address", "phone", "offline_access", "microprofile-jwt" ]
+  } ],
+  "clientScopes" : [ {
+    "id" : "72f29e57-92fa-437b-828c-2b9d6fe56192",
+    "name" : "address",
+    "description" : "OpenID Connect built-in scope: address",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "true",
+      "display.on.consent.screen" : "true",
+      "consent.screen.text" : "${addressScopeConsentText}"
+    },
+    "protocolMappers" : [ {
+      "id" : "59581aea-70d6-4ee8-bec2-1fea5fc497ae",
+      "name" : "address",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-address-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "user.attribute.formatted" : "formatted",
+        "user.attribute.country" : "country",
+        "user.attribute.postal_code" : "postal_code",
+        "userinfo.token.claim" : "true",
+        "user.attribute.street" : "street",
+        "id.token.claim" : "true",
+        "user.attribute.region" : "region",
+        "access.token.claim" : "true",
+        "user.attribute.locality" : "locality"
+      }
+    } ]
+  }, {
+    "id" : "f515ec81-3c1b-4d4d-b7a2-e7e8d47b6447",
+    "name" : "roles",
+    "description" : "OpenID Connect scope for add user roles to the access token",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "false",
+      "display.on.consent.screen" : "true",
+      "consent.screen.text" : "${rolesScopeConsentText}"
+    },
+    "protocolMappers" : [ {
+      "id" : "26d299a8-69e2-4864-9595-17a5b417fc61",
+      "name" : "realm roles",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-realm-role-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "user.attribute" : "foo",
+        "access.token.claim" : "true",
+        "claim.name" : "realm_access.roles",
+        "jsonType.label" : "String",
+        "multivalued" : "true"
+      }
+    }, {
+      "id" : "d2998083-a8db-4f4e-9aaa-9cad68d65b97",
+      "name" : "audience resolve",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-audience-resolve-mapper",
+      "consentRequired" : false,
+      "config" : { }
+    }, {
+      "id" : "7a4cb2e5-07a0-4c16-a024-71df7ddd6868",
+      "name" : "client roles",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-client-role-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "user.attribute" : "foo",
+        "access.token.claim" : "true",
+        "claim.name" : "resource_access.${client_id}.roles",
+        "jsonType.label" : "String",
+        "multivalued" : "true"
+      }
+    } ]
+  }, {
+    "id" : "8f1eafef-92d6-434e-b9ec-6edec1fddd0a",
+    "name" : "offline_access",
+    "description" : "OpenID Connect built-in scope: offline_access",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "consent.screen.text" : "${offlineAccessScopeConsentText}",
+      "display.on.consent.screen" : "true"
+    }
+  }, {
+    "id" : "c03095aa-b656-447a-9767-0763c2ccb070",
+    "name" : "acr",
+    "description" : "OpenID Connect scope for add acr (authentication context class reference) to the token",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "false",
+      "display.on.consent.screen" : "false"
+    },
+    "protocolMappers" : [ {
+      "id" : "948b230c-56d0-4000-937c-841cd395d3f9",
+      "name" : "acr loa level",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-acr-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "id.token.claim" : "true",
+        "access.token.claim" : "true"
+      }
+    } ]
+  }, {
+    "id" : "cdf35f63-8ec7-41a0-ae12-f05d415818cc",
+    "name" : "phone",
+    "description" : "OpenID Connect built-in scope: phone",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "true",
+      "display.on.consent.screen" : "true",
+      "consent.screen.text" : "${phoneScopeConsentText}"
+    },
+    "protocolMappers" : [ {
+      "id" : "ba4348ff-90b1-4e09-89a8-e5c08b04d3d1",
+      "name" : "phone number",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "phoneNumber",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "phone_number",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "e6cceae5-8392-4348-b302-f610ece6056e",
+      "name" : "phone number verified",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "phoneNumberVerified",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "phone_number_verified",
+        "jsonType.label" : "boolean"
+      }
+    } ]
+  }, {
+    "id" : "4318001c-2970-41d3-91b9-e31c08569872",
+    "name" : "email",
+    "description" : "OpenID Connect built-in scope: email",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "true",
+      "display.on.consent.screen" : "true",
+      "consent.screen.text" : "${emailScopeConsentText}"
+    },
+    "protocolMappers" : [ {
+      "id" : "406d02a6-866a-4962-8838-e8c58ada1505",
+      "name" : "email",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-property-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "email",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "email",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "33baabc1-9bf2-42e4-8b8e-a53c13f0b744",
+      "name" : "email verified",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-property-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "emailVerified",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "email_verified",
+        "jsonType.label" : "boolean"
+      }
+    } ]
+  }, {
+    "id" : "5277a84f-d727-4c64-8432-d513127beee1",
+    "name" : "profile",
+    "description" : "OpenID Connect built-in scope: profile",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "true",
+      "display.on.consent.screen" : "true",
+      "consent.screen.text" : "${profileScopeConsentText}"
+    },
+    "protocolMappers" : [ {
+      "id" : "0a609875-2678-4056-93ef-dd5c03e6059d",
+      "name" : "given name",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-property-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "firstName",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "given_name",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "7c510d18-07ee-4b78-8acd-24b777d11b3c",
+      "name" : "website",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "website",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "website",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "0bb6d0ea-195f-49e8-918c-c419a26a661c",
+      "name" : "username",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-property-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "username",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "preferred_username",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "5f1e644c-1acf-440c-b1a6-b5f65bcebfd9",
+      "name" : "profile",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "profile",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "profile",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "c710bdb2-6cfd-4f60-9c4e-730188fc62f7",
+      "name" : "family name",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-property-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "lastName",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "family_name",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "012d5038-0e13-42ba-9df7-2487c8e2eead",
+      "name" : "nickname",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "nickname",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "nickname",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "21590b19-517d-4b6d-92f6-d4f71238677e",
+      "name" : "updated at",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "updatedAt",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "updated_at",
+        "jsonType.label" : "long"
+      }
+    }, {
+      "id" : "e4cddca7-1360-42f3-9854-da6cbe00c71e",
+      "name" : "birthdate",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "birthdate",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "birthdate",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "afee328f-c64c-43e6-80d0-be2721c2ed0e",
+      "name" : "locale",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "locale",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "locale",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "780a1e2c-5b63-46f4-a5bf-dc3fd8ce0cbb",
+      "name" : "full name",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-full-name-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "userinfo.token.claim" : "true"
+      }
+    }, {
+      "id" : "aeebffff-f776-427e-83ed-064707ffce57",
+      "name" : "zoneinfo",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "zoneinfo",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "zoneinfo",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "b3e840a2-1794-4da1-bf69-31905cbff0d6",
+      "name" : "middle name",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "middleName",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "middle_name",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "0607e0e4-4f7f-4214-996d-3599772ce1c7",
+      "name" : "picture",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "picture",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "picture",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "426a609b-4e28-4132-af0d-13297b8cb63a",
+      "name" : "gender",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-attribute-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "gender",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "gender",
+        "jsonType.label" : "String"
+      }
+    } ]
+  }, {
+    "id" : "a1ebde82-ce21-438f-a3ad-261d3eeb1c01",
+    "name" : "role_list",
+    "description" : "SAML role list",
+    "protocol" : "saml",
+    "attributes" : {
+      "consent.screen.text" : "${samlRoleListScopeConsentText}",
+      "display.on.consent.screen" : "true"
+    },
+    "protocolMappers" : [ {
+      "id" : "64653ac7-7ffc-4f7c-a589-03e3b68bbd25",
+      "name" : "role list",
+      "protocol" : "saml",
+      "protocolMapper" : "saml-role-list-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "single" : "false",
+        "attribute.nameformat" : "Basic",
+        "attribute.name" : "Role"
+      }
+    } ]
+  }, {
+    "id" : "aeb5b852-dfec-4e67-9d9e-104abe9b3bf2",
+    "name" : "web-origins",
+    "description" : "OpenID Connect scope for add allowed web origins to the access token",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "false",
+      "display.on.consent.screen" : "false",
+      "consent.screen.text" : ""
+    },
+    "protocolMappers" : [ {
+      "id" : "e2fa8437-a0f1-46fc-af9c-c40fc09cd6a1",
+      "name" : "allowed web origins",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-allowed-origins-mapper",
+      "consentRequired" : false,
+      "config" : { }
+    } ]
+  }, {
+    "id" : "4fecd0d7-d4ad-457e-90f2-c7202bf01ff5",
+    "name" : "microprofile-jwt",
+    "description" : "Microprofile - JWT built-in scope",
+    "protocol" : "openid-connect",
+    "attributes" : {
+      "include.in.token.scope" : "true",
+      "display.on.consent.screen" : "false"
+    },
+    "protocolMappers" : [ {
+      "id" : "a9536634-a9f6-4ed5-a8e7-8379d3b002ca",
+      "name" : "upn",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-property-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "userinfo.token.claim" : "true",
+        "user.attribute" : "username",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "upn",
+        "jsonType.label" : "String"
+      }
+    }, {
+      "id" : "2ce1a702-9458-4926-9b8a-f82c07215755",
+      "name" : "groups",
+      "protocol" : "openid-connect",
+      "protocolMapper" : "oidc-usermodel-realm-role-mapper",
+      "consentRequired" : false,
+      "config" : {
+        "multivalued" : "true",
+        "user.attribute" : "foo",
+        "id.token.claim" : "true",
+        "access.token.claim" : "true",
+        "claim.name" : "groups",
+        "jsonType.label" : "String"
+      }
+    } ]
+  } ],
+  "defaultDefaultClientScopes" : [ "role_list", "profile", "email", "roles", "web-origins", "acr" ],
+  "defaultOptionalClientScopes" : [ "offline_access", "address", "phone", "microprofile-jwt" ],
+  "browserSecurityHeaders" : {
+    "contentSecurityPolicyReportOnly" : "",
+    "xContentTypeOptions" : "nosniff",
+    "xRobotsTag" : "none",
+    "xFrameOptions" : "SAMEORIGIN",
+    "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; object-src 'none';",
+    "xXSSProtection" : "1; mode=block",
+    "strictTransportSecurity" : "max-age=31536000; includeSubDomains"
+  },
+  "smtpServer" : { },
+  "eventsEnabled" : false,
+  "eventsListeners" : [ "jboss-logging" ],
+  "enabledEventTypes" : [ ],
+  "adminEventsEnabled" : false,
+  "adminEventsDetailsEnabled" : false,
+  "identityProviders" : [ ],
+  "identityProviderMappers" : [ ],
+  "components" : {
+    "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy" : [ {
+      "id" : "8115796f-8f1f-4d6a-88f8-ca2938451260",
+      "name" : "Allowed Client Scopes",
+      "providerId" : "allowed-client-templates",
+      "subType" : "authenticated",
+      "subComponents" : { },
+      "config" : {
+        "allow-default-scopes" : [ "true" ]
+      }
+    }, {
+      "id" : "044bd055-714d-478e-aa93-303d2161c427",
+      "name" : "Allowed Protocol Mapper Types",
+      "providerId" : "allowed-protocol-mappers",
+      "subType" : "authenticated",
+      "subComponents" : { },
+      "config" : {
+        "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper" ]
+      }
+    }, {
+      "id" : "be465734-3b0f-4370-a144-73db756e23f8",
+      "name" : "Allowed Protocol Mapper Types",
+      "providerId" : "allowed-protocol-mappers",
+      "subType" : "anonymous",
+      "subComponents" : { },
+      "config" : {
+        "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
+      }
+    }, {
+      "id" : "42a2f64d-ac9e-4221-9cf6-40ff8c868629",
+      "name" : "Trusted Hosts",
+      "providerId" : "trusted-hosts",
+      "subType" : "anonymous",
+      "subComponents" : { },
+      "config" : {
+        "host-sending-registration-request-must-match" : [ "true" ],
+        "client-uris-must-match" : [ "true" ]
+      }
+    }, {
+      "id" : "7ca08915-6c33-454c-88f2-20e1d6553b26",
+      "name" : "Max Clients Limit",
+      "providerId" : "max-clients",
+      "subType" : "anonymous",
+      "subComponents" : { },
+      "config" : {
+        "max-clients" : [ "200" ]
+      }
+    }, {
+      "id" : "f01f2b6f-3f01-4d01-b2f4-70577c6f599c",
+      "name" : "Allowed Client Scopes",
+      "providerId" : "allowed-client-templates",
+      "subType" : "anonymous",
+      "subComponents" : { },
+      "config" : {
+        "allow-default-scopes" : [ "true" ]
+      }
+    }, {
+      "id" : "516d7f21-f21a-4690-831e-36ad313093b2",
+      "name" : "Consent Required",
+      "providerId" : "consent-required",
+      "subType" : "anonymous",
+      "subComponents" : { },
+      "config" : { }
+    }, {
+      "id" : "c79df6a0-d4d8-4866-b9e6-8ddb5d1bd38e",
+      "name" : "Full Scope Disabled",
+      "providerId" : "scope",
+      "subType" : "anonymous",
+      "subComponents" : { },
+      "config" : { }
+    } ],
+    "org.keycloak.userprofile.UserProfileProvider" : [ {
+      "id" : "cf47a21f-c8fb-42f2-9bff-feca967db183",
+      "providerId" : "declarative-user-profile",
+      "subComponents" : { },
+      "config" : { }
+    } ],
+    "org.keycloak.keys.KeyProvider" : [ {
+      "id" : "6b4a2281-a9e8-43ab-aee7-190ae91b2842",
+      "name" : "aes-generated",
+      "providerId" : "aes-generated",
+      "subComponents" : { },
+      "config" : {
+        "kid" : [ "47b9c2c2-32dc-4317-bd8b-1c4e5bb740ca" ],
+        "secret" : [ "9VWsVSqbj5zWa8Mq-rRzOw" ],
+        "priority" : [ "100" ]
+      }
+    }, {
+      "id" : "68e2d2b0-4976-480f-ab76-f84a17686b05",
+      "name" : "rsa-enc-generated",
+      "providerId" : "rsa-enc-generated",
+      "subComponents" : { },
+      "config" : {
+        "privateKey" : [ "MIIEpQIBAAKCAQEAwuIcVVJDncorsQcFef4M/J9dsaNNmwEv/+4pCSZuco7IlA9uCfvwjYgfwQlWoCHCc7JFEtUOXhpLNR0SJ9w2eCC9A/0horjLmiVGU5sGACGrAxSgipt399k83mtkPBTikT1BXumPrX51ovdEPVPQSO0hIBwFn4ZDwA9P/00jNzzswyLC2UDdQrwIjm2xWjq1X82d8mL3+Yp8lF9qD1w305+XPiqCC+TUunKsuCQq5sddet+UoCDsFQyxsJi6cWJrryDvQmiDgM2wm68jn6hyzDE76J1az0wKEGqoMEwIy0juqZCyAqgsm3xA+zHpTcI3EyTwDGpMvWNJp8AWqXPNaQIDAQABAoIBAAethL1+n/6WpUBEaoHcVrq5/2+vo0+dfTyVZNKRFqtG0WOWPzOflFd1HZV7YVPuJI+uPi8ANmsnbh9YcaYg9JiTZ0hMZ++giBf0ID2hZxv995NyXnf7fkoFKghevYG+9mVPtHRmxKlKiPFWfHQjP1ACNKAD2UZdcdbzxicaIkPV/hP996mZA3xaaudggAJq7u/W67H2Q6ofGqW4TI5241d8T+6yobbvXRe4n8FKz4eK2aZv+N+zwh5JDMsJ8050+lCDsyoyakEPf+4veuPkewx4FemAiotDNcmoUQSDL26wLw8kk1uZ9JY0M88OL5pMyBuxTqy0F6BWBltq80mlefECgYEA4vZ8Agu2plXOzWASn0dyhCel3QoeUqNY8D8A+0vK9qWxUE9jMG13jAZmsL2I38SuwRN1DhJezbrn4QTuxTukxgSjLDv/pBp9UnXnCz/fg4yPTYsZ0zHqTMbwvdtfIzBHTCYyIJ+unxVYoenC0XZKSQXA3NN2zNqYpLhjStWdEZECgYEA29DznJxpDZsRUieRxFgZ+eRCjbQ9Q2A46preqMo1KOZ6bt9avxG3uM7pUC+UOeIizeRzxPSJ2SyptYPzdaNwKN3Lq+RhjHe1zYLngXb0CIQaRwNHqePxXF1sg0dTbmcxf+Co7yPG+Nd5nrQq9SQHC3tLTyL6x3VU/yAfMQqUklkCgYEAyVl8iGAV6RkE/4R04OOEv6Ng7WkVn6CUvYZXe5kw9YHnfWUAjS0AOrRPFAsBy+r0UgvN8+7uNjvTjPhQT5/rPVVN4WdVEyQA/E/m6j7/LvhbBaMbBRcqUnTHjNd6XoBtMCxOmkyvoShR2krE8AiuPHwjLoVXxsNDWhbO18wMrVECgYEAlmkICOXNzI2K8Jg62gse2yshjy0BrpSs3XtTWFPkxDPRGwSiZ5OMD10lsMSdvG3MOu5TeTWLDZvOFHJRqPFI0e3Sa7A+P4u6TwF/v8rRePJLuMO5ybo7cWRL2Bh6MlVSPZpQfjIQ+D0Y70uBCXS5jVW0VlYtG0Zh/qDQNxJyTyECgYEAuRINlZ0ag+1QTITapSatbFWd/KquGLpMjZyF4k5gVHs+4zHnnTi1YIDUInp1FJBqKD27z2byy7KFgbMBZQmsDs8i4fgzQrJHe3D4WFFHCjiClbeReejbas9bOnqhSQCiIy1Ck8vMAriAtctSA/g/qq6dQApSgcWaKvTVL2Ywa7E=" ],
+        "keyUse" : [ "ENC" ],
+        "certificate" : [ "MIIClzCCAX8CBgGIQhOIijANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMLiHFVSQ53KK7EHBXn+DPyfXbGjTZsBL//uKQkmbnKOyJQPbgn78I2IH8EJVqAhwnOyRRLVDl4aSzUdEifcNnggvQP9IaK4y5olRlObBgAhqwMUoIqbd/fZPN5rZDwU4pE9QV7pj61+daL3RD1T0EjtISAcBZ+GQ8APT/9NIzc87MMiwtlA3UK8CI5tsVo6tV/NnfJi9/mKfJRfag9cN9Oflz4qggvk1LpyrLgkKubHXXrflKAg7BUMsbCYunFia68g70Jog4DNsJuvI5+ocswxO+idWs9MChBqqDBMCMtI7qmQsgKoLJt8QPsx6U3CNxMk8AxqTL1jSafAFqlzzWkCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAIEIfjqOr2m+8s2RR8VW/nBgOgu9HtPRda4qNhGbgBkZ8NDy7TwHqlHo1ujKW5RO438pRyLJmOibWN4a/rkUsSjin6vgy4l8KpQy+7a4cQCQHyl34TmPjbtiw1jKgiOjzRQY54NVwIJNMIMc1ZyQo4u0U30/FxgUv6akXfS5O1ePD+5xKOOC/Af9AletjhQMPwVxXDwFqfQf/p+SM4Pyn4L633MESfDrH8v9FjJd0lV5ZlEI4hpPtnbi9U+CInqCy3VDNlZjsXswaDRujjg3LERfOMvCgj+Dck3FzWG7EiCwXWNEPvdMzv4w7M6KXuiPPQkST8DUWjgkjUCeLBzT3yw==" ],
+        "priority" : [ "100" ],
+        "algorithm" : [ "RSA-OAEP" ]
+      }
+    }, {
+      "id" : "728769a3-99a4-4cca-959d-28181dfee7e8",
+      "name" : "rsa-generated",
+      "providerId" : "rsa-generated",
+      "subComponents" : { },
+      "config" : {
+        "privateKey" : [ "MIIEowIBAAKCAQEAxIszQCv8bX3sKXJVtuLJV6cH/uhkzxcTEIcDe7y2Y2SFM0x2nF6wRLk8QkvIrRmelilegUIJttqZxLXMpxwUJGizehHQMrOCzNoGBZdVanoK7nNa5+FOYtlvL4GxNfwzS36sp3PnKQiGv5Q7RGuPthjLFfqTmYx/7GTDJC4vLEW5S01Vy/Xc9FE4FsT0hnm91lRWjppc9893M5QUy/TPu8udIuNV87Ko5yiIxQqcPiAQXJaN4CyGaDcYhhzzHdxVptIk2FvtxhpmNxrbtmBCx/o9/rBDQNTis8Ex6ItWC2PvC17UPvyOcZ4Fv/qO0L6JZ0mrpH95CeDU1kEP+KKZrwIDAQABAoIBAGGl6SYiVG1PyTQEXqqY/UCjt3jBnEg5ZhrpgWUKKrGyAO2uOSXSc5AJWfN0NHUwC9b+IbplhW8IJ6qQSmfiLu2x6S2mSQLPphZB4gkIGYNntCOpQ0p+aZP6BGAddt5j+VYyTvR5RKlh15S6QEHrkMB/i/LVBl0c7XeUzlEc8wnyj8DGvlmpcQzIcbWfqEZ/FciDdKGNN0M4V/r1uQiOUVZ69SWDBBwu41YwF7PYUsX83q8zn0nBeMqz0ggSf33lW4w31fox9c7EjIF01gPArE5uT+d+AwjVKHpd08LWGR9W9NSXVOPUKkzOM+PyvKGvzjMnlrm/feqowKQbL2q/GP0CgYEA/EsrvUojkFIWxHc19KJdJvqlYgLeWq6P/J7UmHgpl+S3nG6b9HH4/aM/ICDa5hxd5bmP5p2V3EuZWnyb6/QB5eipC7Ss3oM7XeS/PwvTp6NTC1fypx2zHKse3iuLeCGneRxiw15mB02ArJ/qJw/VSQK2J7RiR4+b6HYpdzQnIysCgYEAx25dTQqskQqsx/orJzuUqfNv/C0W4vqfz1eL3akFrdK+YqghXKFsDmh61JpTrTKnRLAdQeyOrhKwbNsdxSEEaeeLayKLVlimoFXGd/LZb5LQiwFcrvTzhnB+FLmFgqTnuLkpfY1woHEwSW9TpJewjbT9S6g0L2uh223nVXuLMY0CgYEA3pMOlmMGtvbEoTSuRBDNb2rmZm4zbfrcijgxRAWWZCtiFL68FU5LJLBVK2nw09sot1cabZCOuhdzxhFymRneZs73+5y8eV17DV2VnvA3HIiI5dQD/YzFDECm7ceqtiOylLUHKGZqSn0ETMaTkzxzpIKg4qxPm+RE3jMIZ+J5uJsCgYBk2iUIrtsxxgo2Xwavomu9vkPlbQ/j3QYwHn+2qqEalDZ/QbMNWvyAFMn49cpXDgSUsdM54V0OHpllkzFs3ROUUumoViHMmqw47OefBQp8Z+xaP2gVef4lAIJiDKe9t5MPUWPwADTyjgrzN/8+fw9juiFVv0wUpwOFKgEQs5diiQKBgC6RpZESc5Nl4nHrDvIl5n/zYED6BaXoLl15NhcoBudt5SIRO/RpvBW69A7aE/UK6p7WXjq4mP1ssIWz4KgATCoXUgYvn0a7Ql79r/CMce6/FvcuweED6u6bD0kdXuYhe8fR9IPmLfnnb4Cx3JOJeRZbiBSP5HOZJ7nsKibxcgPm" ],
+        "keyUse" : [ "SIG" ],
+        "certificate" : [ "MIIClzCCAX8CBgGIQhOHjjANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDAR0ZXN0MB4XDTIzMDUyMjA2MDczNloXDTMzMDUyMjA2MDkxNlowDzENMAsGA1UEAwwEdGVzdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMSLM0Ar/G197ClyVbbiyVenB/7oZM8XExCHA3u8tmNkhTNMdpxesES5PEJLyK0ZnpYpXoFCCbbamcS1zKccFCRos3oR0DKzgszaBgWXVWp6Cu5zWufhTmLZby+BsTX8M0t+rKdz5ykIhr+UO0Rrj7YYyxX6k5mMf+xkwyQuLyxFuUtNVcv13PRROBbE9IZ5vdZUVo6aXPfPdzOUFMv0z7vLnSLjVfOyqOcoiMUKnD4gEFyWjeAshmg3GIYc8x3cVabSJNhb7cYaZjca27ZgQsf6Pf6wQ0DU4rPBMeiLVgtj7wte1D78jnGeBb/6jtC+iWdJq6R/eQng1NZBD/iima8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAe0Bo1UpGfpOlJiVhp0XWExm8bdxFgXOU2M5XeZBsWAqBehvJkzn+tbAtlVNiIiN58XFFpH+xLZ2nJIZR5FHeCD3bYAgK72j5k45HJI95vPyslelfT/m3Np78+1iUa1U1WxN40JaowP1EeTkk5O8Pk4zTQ1Ne1usmKd+SJxI1KWN0kKuVFMmdNRb5kQKWeQvOSlWl7rd4bvHGvVnxgcPC1bshEJKRt+VpaUjpm6CKd8C3Kt7IWfIX4HTVhKZkmLn7qv6aSfwWelwZfLdaXcLXixqzqNuUk/VWbF9JT4iiag9F3mt7xryIkoRp1AEjCA82HqK72F4JCFyOhCiGrMfKJw==" ],
+        "priority" : [ "100" ]
+      }
+    }, {
+      "id" : "f30af2d2-d042-43b8-bc6d-22f6bab6934c",
+      "name" : "hmac-generated",
+      "providerId" : "hmac-generated",
+      "subComponents" : { },
+      "config" : {
+        "kid" : [ "6f0d9688-e974-42b4-9d84-8d098c51007c" ],
+        "secret" : [ "8nruwD66Revr9k21e-BHtcyvNzAMFOsstxSAB0Gdy2qe2qGRm2kYOwsPzrH9ZQSdj2041SraKo6a3SHvCyTBAQ" ],
+        "priority" : [ "100" ],
+        "algorithm" : [ "HS256" ]
+      }
+    } ]
+  },
+  "internationalizationEnabled" : false,
+  "supportedLocales" : [ ],
+  "authenticationFlows" : [ {
+    "id" : "94c65ba1-ba50-4be2-94c4-de656145eb67",
+    "alias" : "Account verification options",
+    "description" : "Method with which to verity the existing account",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "idp-email-verification",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Verify Existing Account by Re-authentication",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "3b706ddf-c4b6-498a-803c-772878bc9bc3",
+    "alias" : "Authentication Options",
+    "description" : "Authentication options.",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "basic-auth",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "basic-auth-otp",
+      "authenticatorFlow" : false,
+      "requirement" : "DISABLED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "auth-spnego",
+      "authenticatorFlow" : false,
+      "requirement" : "DISABLED",
+      "priority" : 30,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "9ea0b8f6-882c-45ad-9110-78adf5a5d233",
+    "alias" : "Browser - Conditional OTP",
+    "description" : "Flow to determine if the OTP is required for the authentication",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "conditional-user-configured",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "auth-otp-form",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "99c5ba83-b585-4601-b740-1a26670bf4e9",
+    "alias" : "Direct Grant - Conditional OTP",
+    "description" : "Flow to determine if the OTP is required for the authentication",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "conditional-user-configured",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "direct-grant-validate-otp",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "65b73dec-7dd1-4de8-b542-a023b7104afc",
+    "alias" : "First broker login - Conditional OTP",
+    "description" : "Flow to determine if the OTP is required for the authentication",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "conditional-user-configured",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "auth-otp-form",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "9a26b76f-da95-43f1-8da3-16c4a0654f07",
+    "alias" : "Handle Existing Account",
+    "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "idp-confirm-link",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Account verification options",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "0a77285e-d7d5-4b6c-aa9a-3eadb5e7e3d3",
+    "alias" : "Reset - Conditional OTP",
+    "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "conditional-user-configured",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "reset-otp",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "cb6c0b3b-2f5f-4493-9d14-6130f8b58dd7",
+    "alias" : "User creation or linking",
+    "description" : "Flow for the existing/non-existing user alternatives",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticatorConfig" : "create unique user config",
+      "authenticator" : "idp-create-user-if-unique",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Handle Existing Account",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "0fd3db1b-e93d-4768-82ca-a1498ddc11d0",
+    "alias" : "Verify Existing Account by Re-authentication",
+    "description" : "Reauthentication of existing account",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "idp-username-password-form",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "CONDITIONAL",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "First broker login - Conditional OTP",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "86610e70-f9f5-4c11-8a9e-9de1770565fb",
+    "alias" : "browser",
+    "description" : "browser based authentication",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "auth-cookie",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "auth-spnego",
+      "authenticatorFlow" : false,
+      "requirement" : "DISABLED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "identity-provider-redirector",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 25,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 30,
+      "autheticatorFlow" : true,
+      "flowAlias" : "forms",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "f6aa23dd-8532-4d92-9780-3ea226481e3b",
+    "alias" : "clients",
+    "description" : "Base authentication for clients",
+    "providerId" : "client-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "client-secret",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "client-jwt",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "client-secret-jwt",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 30,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "client-x509",
+      "authenticatorFlow" : false,
+      "requirement" : "ALTERNATIVE",
+      "priority" : 40,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "4d2caf65-1703-4ddb-8890-70232e91bcd8",
+    "alias" : "direct grant",
+    "description" : "OpenID Connect Resource Owner Grant",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "direct-grant-validate-username",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "direct-grant-validate-password",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "CONDITIONAL",
+      "priority" : 30,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Direct Grant - Conditional OTP",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "eaa20c41-5334-4fb4-8c45-fb9cc71f7f74",
+    "alias" : "docker auth",
+    "description" : "Used by Docker clients to authenticate against the IDP",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "docker-http-basic-authenticator",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "b9febfb1-f0aa-4590-b782-272a4aa11575",
+    "alias" : "first broker login",
+    "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticatorConfig" : "review profile config",
+      "authenticator" : "idp-review-profile",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "User creation or linking",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "03bb6ff4-eccb-4f2f-8953-3769f78c3bf3",
+    "alias" : "forms",
+    "description" : "Username, password, otp and other auth forms.",
+    "providerId" : "basic-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "auth-username-password-form",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "CONDITIONAL",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Browser - Conditional OTP",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "38385189-246b-4ea0-ac05-d49dfe1709da",
+    "alias" : "http challenge",
+    "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "no-cookie-redirect",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Authentication Options",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "1022f3c2-0469-41c9-861e-918908f103df",
+    "alias" : "registration",
+    "description" : "registration flow",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "registration-page-form",
+      "authenticatorFlow" : true,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : true,
+      "flowAlias" : "registration form",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "00d36c3b-e1dc-41f8-bfd0-5f8c80ea07e8",
+    "alias" : "registration form",
+    "description" : "registration form",
+    "providerId" : "form-flow",
+    "topLevel" : false,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "registration-user-creation",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "registration-profile-action",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 40,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "registration-password-action",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 50,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "registration-recaptcha-action",
+      "authenticatorFlow" : false,
+      "requirement" : "DISABLED",
+      "priority" : 60,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "4374c16e-8c65-4168-94c2-df1ab3f3e6ad",
+    "alias" : "reset credentials",
+    "description" : "Reset credentials for a user if they forgot their password or something",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "reset-credentials-choose-user",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "reset-credential-email",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 20,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticator" : "reset-password",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 30,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    }, {
+      "authenticatorFlow" : true,
+      "requirement" : "CONDITIONAL",
+      "priority" : 40,
+      "autheticatorFlow" : true,
+      "flowAlias" : "Reset - Conditional OTP",
+      "userSetupAllowed" : false
+    } ]
+  }, {
+    "id" : "04d6ed6a-76c9-41fb-9074-bff8a80c2286",
+    "alias" : "saml ecp",
+    "description" : "SAML ECP Profile Authentication Flow",
+    "providerId" : "basic-flow",
+    "topLevel" : true,
+    "builtIn" : true,
+    "authenticationExecutions" : [ {
+      "authenticator" : "http-basic-authenticator",
+      "authenticatorFlow" : false,
+      "requirement" : "REQUIRED",
+      "priority" : 10,
+      "autheticatorFlow" : false,
+      "userSetupAllowed" : false
+    } ]
+  } ],
+  "authenticatorConfig" : [ {
+    "id" : "e7bad67d-1236-430a-a327-9194f9d1e2b0",
+    "alias" : "create unique user config",
+    "config" : {
+      "require.password.update.after.registration" : "false"
+    }
+  }, {
+    "id" : "287b5989-a927-4cf5-8067-74594ce19bc1",
+    "alias" : "review profile config",
+    "config" : {
+      "update.profile.on.first.login" : "missing"
+    }
+  } ],
+  "requiredActions" : [ {
+    "alias" : "CONFIGURE_TOTP",
+    "name" : "Configure OTP",
+    "providerId" : "CONFIGURE_TOTP",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 10,
+    "config" : { }
+  }, {
+    "alias" : "terms_and_conditions",
+    "name" : "Terms and Conditions",
+    "providerId" : "terms_and_conditions",
+    "enabled" : false,
+    "defaultAction" : false,
+    "priority" : 20,
+    "config" : { }
+  }, {
+    "alias" : "UPDATE_PASSWORD",
+    "name" : "Update Password",
+    "providerId" : "UPDATE_PASSWORD",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 30,
+    "config" : { }
+  }, {
+    "alias" : "UPDATE_PROFILE",
+    "name" : "Update Profile",
+    "providerId" : "UPDATE_PROFILE",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 40,
+    "config" : { }
+  }, {
+    "alias" : "VERIFY_EMAIL",
+    "name" : "Verify Email",
+    "providerId" : "VERIFY_EMAIL",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 50,
+    "config" : { }
+  }, {
+    "alias" : "delete_account",
+    "name" : "Delete Account",
+    "providerId" : "delete_account",
+    "enabled" : false,
+    "defaultAction" : false,
+    "priority" : 60,
+    "config" : { }
+  }, {
+    "alias" : "webauthn-register",
+    "name" : "Webauthn Register",
+    "providerId" : "webauthn-register",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 70,
+    "config" : { }
+  }, {
+    "alias" : "webauthn-register-passwordless",
+    "name" : "Webauthn Register Passwordless",
+    "providerId" : "webauthn-register-passwordless",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 80,
+    "config" : { }
+  }, {
+    "alias" : "update_user_locale",
+    "name" : "Update User Locale",
+    "providerId" : "update_user_locale",
+    "enabled" : true,
+    "defaultAction" : false,
+    "priority" : 1000,
+    "config" : { }
+  } ],
+  "browserFlow" : "browser",
+  "registrationFlow" : "registration",
+  "directGrantFlow" : "direct grant",
+  "resetCredentialsFlow" : "reset credentials",
+  "clientAuthenticationFlow" : "clients",
+  "dockerAuthenticationFlow" : "docker auth",
+  "attributes" : {
+    "cibaBackchannelTokenDeliveryMode" : "poll",
+    "cibaAuthRequestedUserHint" : "login_hint",
+    "oauth2DevicePollingInterval" : "5",
+    "clientOfflineSessionMaxLifespan" : "0",
+    "clientSessionIdleTimeout" : "0",
+    "clientOfflineSessionIdleTimeout" : "0",
+    "cibaInterval" : "5",
+    "cibaExpiresIn" : "120",
+    "oauth2DeviceCodeLifespan" : "600",
+    "parRequestUriLifespan" : "60",
+    "clientSessionMaxLifespan" : "0",
+    "frontendUrl" : ""
+  },
+  "keycloakVersion" : "19.0.3",
+  "userManagedAccessAllowed" : false,
+  "clientProfiles" : {
+    "profiles" : [ ]
+  },
+  "clientPolicies" : {
+    "policies" : [ ]
+  }
+}
\ No newline at end of file
diff --git a/conf/localstack/buckets.sh b/conf/localstack/buckets.sh
new file mode 100755
index 00000000000..fe940d9890d
--- /dev/null
+++ b/conf/localstack/buckets.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+# https://stackoverflow.com/questions/53619901/auto-create-s3-buckets-on-localstack
+awslocal s3 mb s3://mybucket
diff --git a/conf/solr/8.11.1/readme.md b/conf/solr/8.11.1/readme.md
deleted file mode 100644
index 4457cf9a7df..00000000000
--- a/conf/solr/8.11.1/readme.md
+++ /dev/null
@@ -1 +0,0 @@
-Please see the dev guide for what to do with Solr config files.
\ No newline at end of file
diff --git a/conf/solr/8.11.1/schema.xml b/conf/solr/9.3.0/schema.xml
similarity index 96%
rename from conf/solr/8.11.1/schema.xml
rename to conf/solr/9.3.0/schema.xml
index f11938621fc..3711ffeddba 100644
--- a/conf/solr/8.11.1/schema.xml
+++ b/conf/solr/9.3.0/schema.xml
@@ -23,7 +23,7 @@
 
 
  For more information, on how to customize this file, please see
- http://lucene.apache.org/solr/guide/documents-fields-and-schema-design.html
+ https://solr.apache.org/guide/solr/latest/indexing-guide/schema-elements.html
 
  PERFORMANCE NOTE: this schema includes many optional features and should not
  be used for benchmarking.  To improve performance one could
@@ -38,7 +38,7 @@
     catchall "text" field, and use that for searching.
 -->
 
-<schema name="default-config" version="1.7">
+<schema name="default-config" version="1.6">
     <!-- attribute "name" is the name of this schema and is only used for display purposes.
        version="x.y" is Solr's version number for the schema syntax and 
        semantics.  It should not normally be changed by applications.
@@ -129,15 +129,8 @@
     <!-- catchall text field that indexes tokens both normally and in reverse for efficient
         leading wildcard queries. -->
     <field name="text_rev" type="text_general_rev" indexed="true" stored="false" multiValued="true"/>    
-    <field name="name" type="text_en" indexed="true" stored="true"/> 
-
-
-
-
-
-
-
-
+    <field name="name" type="text_en" indexed="true" stored="true"/>
+    
     <field name="definitionPointDocId" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="definitionPointDvObjectId" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="discoverableBy" type="string" stored="true" indexed="true" multiValued="true"/>
@@ -163,7 +156,7 @@
 
     <field name="publicationStatus" type="string" stored="true" indexed="true" multiValued="true"/>
     <field name="externalStatus" type="string" stored="true" indexed="true" multiValued="false"/>
-    <field name="embargoEndDate" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="embargoEndDate" type="plong" stored="true" indexed="true" multiValued="false"/>
     
     <field name="subtreePaths" type="string" stored="true" indexed="true" multiValued="true"/>
 
@@ -200,7 +193,7 @@
     <field name="identifier" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="persistentUrl" type="string" stored="true" indexed="false" multiValued="false"/>
     <field name="unf" type="string" stored="true" indexed="true" multiValued="false"/>
-    <field name="fileSizeInBytes" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="fileSizeInBytes" type="plong" stored="true" indexed="true" multiValued="false"/>
     <field name="fileMd5" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="fileChecksumType" type="string" stored="true" indexed="true" multiValued="false"/>
     <field name="fileChecksumValue" type="string" stored="true" indexed="true" multiValued="false"/>
@@ -208,9 +201,9 @@
     <field name="deaccessionReason" type="string" stored="true" indexed="false" multiValued="false"/>
 
     <!-- Added for Dataverse 4.0 alpha 1. This is a required field so we don't have to go to the database to get the database id of the entity. On cards we use the id in links -->
-    <field name="entityId" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="entityId" type="plong" stored="true" indexed="true" multiValued="false"/>
 
-    <field name="datasetVersionId" type="long" stored="true" indexed="true" multiValued="false"/>
+    <field name="datasetVersionId" type="plong" stored="true" indexed="true" multiValued="false"/>
 
     <!-- Added for Dataverse 4.0 alpha 1 to sort by name  -->
     <!-- https://redmine.hmdc.harvard.edu/issues/3482 -->
@@ -218,10 +211,10 @@
     <!-- http://stackoverflow.com/questions/13360706/solr-4-0-alphabetical-sorting-trouble/13361226#13361226 -->
     <field name="nameSort" type="alphaOnlySort" indexed="true" stored="true"/>
 
-    <field name="dateSort" type="date" indexed="true" stored="true"/>
+    <field name="dateSort" type="pdate" indexed="true" stored="true"/>
 
     <!-- Added for Dataverse 4.0: release date https://redmine.hmdc.harvard.edu/issues/3592 -->
-    <field name="releasedate" type="int" indexed="true" stored="true"/>
+    <field name="releasedate" type="pint" indexed="true" stored="true"/>
 
     <!-- Added for Dataverse 4.0: do we want a description field that applies to dataverses, datasets, and files? https://redmine.hmdc.harvard.edu/issues/3745 -->
     <field name="description" type="text_en" multiValued="false" stored="true" indexed="true"/>
@@ -233,6 +226,9 @@
     <field name="geolocation" type="location_rpt" multiValued="true" stored="true" indexed="true"/>
     <!-- https://solr.apache.org/guide/8_11/spatial-search.html#bboxfield -->
     <field name="boundingBox" type="bbox" multiValued="true" stored="true" indexed="true"/>
+
+    <!-- incomplete datasets issue 8822 -->
+    <field name="datasetValid" type="boolean" stored="true" indexed="true" multiValued="false"/>
     
     <!--
         METADATA SCHEMA FIELDS
@@ -250,7 +246,7 @@
     <!-- SCHEMA-FIELDS::BEGIN -->
     <field name="accessToSources" type="text_en" multiValued="false" stored="true" indexed="true"/>
     <field name="actionsToMinimizeLoss" type="text_en" multiValued="false" stored="true" indexed="true"/>
-    <field name="alternativeTitle" type="text_en" multiValued="false" stored="true" indexed="true"/>
+    <field name="alternativeTitle" type="text_en" multiValued="true" stored="true" indexed="true"/>
     <field name="alternativeURL" type="text_en" multiValued="false" stored="true" indexed="true"/>
     <field name="astroFacility" type="text_en" multiValued="true" stored="true" indexed="true"/>
     <field name="astroInstrument" type="text_en" multiValued="true" stored="true" indexed="true"/>
@@ -364,9 +360,9 @@
     <field name="responseRate" type="text_en" multiValued="false" stored="true" indexed="true"/>
     <field name="samplingErrorEstimates" type="text_en" multiValued="false" stored="true" indexed="true"/>
     <field name="samplingProcedure" type="text_en" multiValued="false" stored="true" indexed="true"/>
-    <field name="series" type="text_en" multiValued="false" stored="true" indexed="true"/>
-    <field name="seriesInformation" type="text_en" multiValued="false" stored="true" indexed="true"/>
-    <field name="seriesName" type="text_en" multiValued="false" stored="true" indexed="true"/>
+    <field name="series" type="text_en" multiValued="true" stored="true" indexed="true"/>
+    <field name="seriesInformation" type="text_en" multiValued="true" stored="true" indexed="true"/>
+    <field name="seriesName" type="text_en" multiValued="true" stored="true" indexed="true"/>
     <field name="socialScienceNotes" type="text_en" multiValued="false" stored="true" indexed="true"/>
     <field name="socialScienceNotesSubject" type="text_en" multiValued="false" stored="true" indexed="true"/>
     <field name="socialScienceNotesText" type="text_en" multiValued="false" stored="true" indexed="true"/>
@@ -470,6 +466,8 @@
     <!-- <copyField source="*_ss" dest="_text_" maxChars="3000"/> -->
     <!-- <copyField source="*_i" dest="_text_" maxChars="3000"/> -->
     
+    <copyField source="datasetValid" dest="_text_" maxChars="3000"/>
+    
     <!--
         METADATA SCHEMA FIELDS
         Now following: copyFields to copy the contents of the metadata fields above to a
@@ -653,7 +651,8 @@
     <!-- Dynamic field definitions allow using convention over configuration
        for fields via the specification of patterns to match field names.
        EXAMPLE:  name="*_i" will match any field ending in _i (like myid_i, z_i)
-       RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end.  -->
+       RESTRICTION: the glob-like pattern in the name attribute must have a "*"
+       only at the start or the end.  -->
    
     <dynamicField name="*_i"  type="pint"    indexed="true"  stored="true"/>
     <dynamicField name="*_is" type="pints"    indexed="true"  stored="true"/>
@@ -661,19 +660,23 @@
     <dynamicField name="*_ss" type="strings"  indexed="true"  stored="true"/>
     <dynamicField name="*_l"  type="plong"   indexed="true"  stored="true"/>
     <dynamicField name="*_ls" type="plongs"   indexed="true"  stored="true"/>
-    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
     <dynamicField name="*_b"  type="boolean" indexed="true" stored="true"/>
     <dynamicField name="*_bs" type="booleans" indexed="true" stored="true"/>
     <dynamicField name="*_f"  type="pfloat"  indexed="true"  stored="true"/>
     <dynamicField name="*_fs" type="pfloats"  indexed="true"  stored="true"/>
     <dynamicField name="*_d"  type="pdouble" indexed="true"  stored="true"/>
     <dynamicField name="*_ds" type="pdoubles" indexed="true"  stored="true"/>
+    <dynamicField name="*_dt"  type="pdate"    indexed="true"  stored="true"/>
+    <dynamicField name="*_dts" type="pdates"   indexed="true"  stored="true"/>
+    <dynamicField name="*_t"   type="text_general" indexed="true" stored="true" multiValued="false"/>
+    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true"/>
+    
+    <dynamicField name="random_*" type="random"/>
+    <dynamicField name="ignored_*" type="ignored"/>
 
     <!-- Type used for data-driven schema, to add a string copy for each text field -->
-    <dynamicField name="*_str" type="strings" stored="false" docValues="true" indexed="false" />
-
-    <dynamicField name="*_dt"  type="pdate"    indexed="true"  stored="true"/>
-    <dynamicField name="*_dts" type="pdate"    indexed="true"  stored="true" multiValued="true"/>
+    <dynamicField name="*_str" type="strings" stored="false" docValues="true" indexed="false" useDocValuesAsStored="false" />
+    
     <dynamicField name="*_p"  type="location" indexed="true" stored="true"/>
     <dynamicField name="*_srpt"  type="location_rpt" indexed="true" stored="true"/>
     
@@ -719,43 +722,6 @@
          field first in an ascending sort and last in a descending sort.
     -->
 
-<fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
-<fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
-<fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
-<fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
-
-<fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
-<fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
-<fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
-<fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
-
-<!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
-        is a more restricted form of the canonical representation of dateTime
-        http://www.w3.org/TR/xmlschema-2/#dateTime    
-        The trailing "Z" designates UTC time and is mandatory.
-        Optional fractional seconds are allowed: 1995-12-31T23:59:59.999Z
-        All other components are mandatory.
-
-        Expressions can also be used to denote calculations that should be
-        performed relative to "NOW" to determine the value, ie...
-
-            NOW/HOUR
-                ... Round to the start of the current hour
-            NOW-1DAY
-                ... Exactly 1 day prior to now
-            NOW/DAY+6MONTHS+3DAYS
-                ... 6 months and 3 days in the future from the start of
-                    the current day
-                    
-        Consult the DateField javadocs for more information.
-
-        Note: For faster range queries, consider the tdate type
-    -->
-    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
-
-    <!-- A Trie based date field for faster date range queries and date faceting. -->
-    <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
-
     <!-- This is an example of using the KeywordTokenizer along
         With various TokenFilterFactories to produce a sortable field
         that does not include some properties of the source text
@@ -810,6 +776,11 @@
     <fieldType name="pfloats" class="solr.FloatPointField" docValues="true" multiValued="true"/>
     <fieldType name="plongs" class="solr.LongPointField" docValues="true" multiValued="true"/>
     <fieldType name="pdoubles" class="solr.DoublePointField" docValues="true" multiValued="true"/>
+    <fieldType name="random" class="solr.RandomSortField" indexed="true"/>
+    
+    <!-- since fields of this type are by default not stored or indexed,
+       any data added to them will be ignored outright.  -->
+    <fieldType name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField" />
 
     <!-- The format for this date field is of the form 1995-12-31T23:59:59Z, and
          is a more restricted form of the canonical representation of dateTime
@@ -836,7 +807,14 @@
     
     <!--Binary data type. The data should be sent/retrieved in as Base64 encoded Strings -->
     <fieldType name="binary" class="solr.BinaryField"/>
-
+    
+    <!--
+    RankFields can be used to store scoring factors to improve document ranking. They should be used
+    in combination with RankQParserPlugin.
+    (experimental)
+    -->
+    <fieldType name="rank" class="solr.RankField"/>
+    
     <!-- solr.TextField allows the specification of custom text analyzers
          specified as a tokenizer and a list of token filters. Different
          analyzers may be specified for indexing and querying.
@@ -846,7 +824,7 @@
          matching across fields.
 
          For more info on customizing your analyzer chain, please see
-         http://lucene.apache.org/solr/guide/understanding-analyzers-tokenizers-and-filters.html#understanding-analyzers-tokenizers-and-filters
+         https://solr.apache.org/guide/solr/latest/indexing-guide/document-analysis.html#using-analyzers-tokenizers-and-filters
      -->
 
     <!-- One can also specify an existing Analyzer class that has a
@@ -861,7 +839,7 @@
     <dynamicField name="*_ws" type="text_ws"  indexed="true"  stored="true"/>
     <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
       <analyzer>
-        <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+        <tokenizer name="whitespace"/>
       </analyzer>
     </fieldType>
 
@@ -888,6 +866,30 @@
         <filter class="solr.LowerCaseFilterFactory"/>
       </analyzer>
     </fieldType>
+    
+    <!-- SortableTextField generally functions exactly like TextField,
+        except that it supports, and by default uses, docValues for sorting (or faceting)
+        on the first 1024 characters of the original field values (which is configurable).
+
+        This makes it a bit more useful than TextField in many situations, but the trade-off
+        is that it takes up more space on disk, which is why it's not used in place of TextField
+        for every fieldType in this _default schema.
+    -->
+    <dynamicField name="*_t_sort" type="text_gen_sort" indexed="true" stored="true" multiValued="false"/>
+    <dynamicField name="*_txt_sort" type="text_gen_sort" indexed="true" stored="true"/>
+    <fieldType name="text_gen_sort" class="solr.SortableTextField" positionIncrementGap="100" multiValued="true">
+      <analyzer type="index">
+        <tokenizer name="standard"/>
+        <filter name="stop" ignoreCase="true" words="stopwords.txt" />
+        <filter name="lowercase"/>
+      </analyzer>
+      <analyzer type="query">
+        <tokenizer name="standard"/>
+        <filter name="stop" ignoreCase="true" words="stopwords.txt" />
+        <filter name="synonymGraph" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+        <filter name="lowercase"/>
+      </analyzer>
+    </fieldType>
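+    <!-- Illustrative usage (editorial): because SortableTextField keeps docValues for the
+       (truncated) original value, a field matching the rules above, e.g. "title_t_sort",
+       can be sorted or faceted on directly, e.g. sort=title_t_sort asc. -->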
 
     <!-- A text field with defaults appropriate for English: it tokenizes with StandardTokenizer,
          removes English stop words (lang/stopwords_en.txt), down cases, protects words from protwords.txt, and
diff --git a/conf/solr/8.11.1/solrconfig.xml b/conf/solr/9.3.0/solrconfig.xml
similarity index 67%
rename from conf/solr/8.11.1/solrconfig.xml
rename to conf/solr/9.3.0/solrconfig.xml
index 3e4e5adc7b6..36ed4f23390 100644
--- a/conf/solr/8.11.1/solrconfig.xml
+++ b/conf/solr/9.3.0/solrconfig.xml
@@ -1,1410 +1,1179 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
-     For more details about configurations options that may appear in
-     this file, see http://wiki.apache.org/solr/SolrConfigXml.
--->
-<config>
-  <!-- In all configuration below, a prefix of "solr." for class names
-       is an alias that causes solr to search appropriate packages,
-       including org.apache.solr.(search|update|request|core|analysis)
-
-       You may also specify a fully qualified Java classname if you
-       have your own custom plugins.
-    -->
-
-  <!-- Controls what version of Lucene various components of Solr
-       adhere to.  Generally, you want to use the latest version to
-       get all bug fixes and improvements. It is highly recommended
-       that you fully re-index after changing this setting as it can
-       affect both how text is indexed and queried.
-  -->
-  <luceneMatchVersion>7.3.0</luceneMatchVersion>
-
-  <!-- <lib/> directives can be used to instruct Solr to load any Jars
-       identified and use them to resolve any "plugins" specified in
-       your solrconfig.xml or schema.xml (ie: Analyzers, Request
-       Handlers, etc...).
-
-       All directories and paths are resolved relative to the
-       instanceDir.
-
-       Please note that <lib/> directives are processed in the order
-       that they appear in your solrconfig.xml file, and are "stacked"
-       on top of each other when building a ClassLoader - so if you have
-       plugin jars with dependencies on other jars, the "lower level"
-       dependency jars should be loaded first.
-
-       If a "./lib" directory exists in your instanceDir, all files
-       found in it are included as if you had used the following
-       syntax...
-
-              <lib dir="./lib" />
-    -->
-
-  <!-- A 'dir' option by itself adds any files found in the directory
-       to the classpath, this is useful for including all jars in a
-       directory.
-
-       When a 'regex' is specified in addition to a 'dir', only the
-       files in that directory which completely match the regex
-       (anchored on both ends) will be included.
-
-       If a 'dir' option (with or without a regex) is used and nothing
-       is found that matches, a warning will be logged.
-
-       The examples below can be used to load some solr-contribs along
-       with their external dependencies.
-    -->
-  <lib dir="${solr.install.dir:../../../..}/contrib/extraction/lib" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-cell-\d.*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/contrib/clustering/lib/" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-clustering-\d.*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/contrib/langid/lib/" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-langid-\d.*\.jar" />
-
-  <lib dir="${solr.install.dir:../../../..}/contrib/velocity/lib" regex=".*\.jar" />
-  <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-velocity-\d.*\.jar" />
-  <!-- an exact 'path' can be used instead of a 'dir' to specify a
-       specific jar file.  This will cause a serious error to be logged
-       if it can't be loaded.
-    -->
-  <!--
-     <lib path="../a-jar-that-does-not-exist.jar" />
-  -->
-
-  <!-- Data Directory
-
-       Used to specify an alternate directory to hold all index data
-       other than the default ./data under the Solr home.  If
-       replication is in use, this should match the replication
-       configuration.
-    -->
-  <dataDir>${solr.data.dir:}</dataDir>
-
-
-  <!-- The DirectoryFactory to use for indexes.
-
-       solr.StandardDirectoryFactory is filesystem
-       based and tries to pick the best implementation for the current
-       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
-       wraps solr.StandardDirectoryFactory and caches small files in memory
-       for better NRT performance.
-
-       One can force a particular implementation via solr.MMapDirectoryFactory,
-       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
-
-       solr.RAMDirectoryFactory is memory based and not persistent.
-    -->
-  <directoryFactory name="DirectoryFactory"
-                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
-
-  <!-- The CodecFactory for defining the format of the inverted index.
-       The default implementation is SchemaCodecFactory, which is the official Lucene
-       index format, but hooks into the schema to provide per-field customization of
-       the postings lists and per-document values in the fieldType element
-       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
-       are experimental, so if you choose to customize the index format, it's a good
-       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
-       before upgrading to a newer version to avoid unnecessary reindexing.
-       A "compressionMode" string element can be added to <codecFactory> to choose
-       between the existing compression modes in the default codec: "BEST_SPEED" (default)
-       or "BEST_COMPRESSION".
-  -->
-  <codecFactory class="solr.SchemaCodecFactory"/>
-
-  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       Index Config - These settings control low-level behavior of indexing
-       Most example settings here show the default value, but are commented
-       out, to more easily see where customizations have been made.
-
-       Note: This replaces <indexDefaults> and <mainIndex> from older versions
-       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
-  <indexConfig>
-    <!-- maxFieldLength was removed in 4.0. To get similar behavior, include a
-         LimitTokenCountFilterFactory in your fieldType definition. E.g.
-     <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="10000"/>
-    -->
-    <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
-    <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
-
-    <!-- Expert: Enabling compound file will use less files for the index,
-         using fewer file descriptors on the expense of performance decrease.
-         Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
-    <!-- <useCompoundFile>false</useCompoundFile> -->
-
-    <!-- ramBufferSizeMB sets the amount of RAM that may be used by Lucene
-         indexing for buffering added documents and deletions before they are
-         flushed to the Directory.
-         maxBufferedDocs sets a limit on the number of documents buffered
-         before flushing.
-         If both ramBufferSizeMB and maxBufferedDocs is set, then
-         Lucene will flush based on whichever limit is hit first.  -->
-    <!-- <ramBufferSizeMB>100</ramBufferSizeMB> -->
-    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->
-
-    <!-- Expert: Merge Policy
-         The Merge Policy in Lucene controls how merging of segments is done.
-         The default since Solr/Lucene 3.3 is TieredMergePolicy.
-         The default since Lucene 2.3 was the LogByteSizeMergePolicy,
-         Even older versions of Lucene used LogDocMergePolicy.
-      -->
-    <!--
-        <mergePolicyFactory class="org.apache.solr.index.TieredMergePolicyFactory">
-          <int name="maxMergeAtOnce">10</int>
-          <int name="segmentsPerTier">10</int>
-          <double name="noCFSRatio">0.1</double>
-        </mergePolicyFactory>
-      -->
-
-    <!-- Expert: Merge Scheduler
-         The Merge Scheduler in Lucene controls how merges are
-         performed.  The ConcurrentMergeScheduler (Lucene 2.3 default)
-         can perform merges in the background using separate threads.
-         The SerialMergeScheduler (Lucene 2.2 default) does not.
-     -->
-    <!--
-       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
-       -->
-
-    <!-- LockFactory
-
-         This option specifies which Lucene LockFactory implementation
-         to use.
-
-         single = SingleInstanceLockFactory - suggested for a
-                  read-only index or when there is no possibility of
-                  another process trying to modify the index.
-         native = NativeFSLockFactory - uses OS native file locking.
-                  Do not use when multiple solr webapps in the same
-                  JVM are attempting to share a single index.
-         simple = SimpleFSLockFactory  - uses a plain file for locking
-
-         Defaults: 'native' is default for Solr3.6 and later, otherwise
-                   'simple' is the default
-
-         More details on the nuances of each LockFactory...
-         http://wiki.apache.org/lucene-java/AvailableLockFactories
-    -->
-    <lockType>${solr.lock.type:native}</lockType>
-
-    <!-- Commit Deletion Policy
-         Custom deletion policies can be specified here. The class must
-         implement org.apache.lucene.index.IndexDeletionPolicy.
-
-         The default Solr IndexDeletionPolicy implementation supports
-         deleting index commit points on number of commits, age of
-         commit point and optimized status.
-
-         The latest commit point should always be preserved regardless
-         of the criteria.
-    -->
-    <!--
-    <deletionPolicy class="solr.SolrDeletionPolicy">
-    -->
-    <!-- The number of commit points to be kept -->
-    <!-- <str name="maxCommitsToKeep">1</str> -->
-    <!-- The number of optimized commit points to be kept -->
-    <!-- <str name="maxOptimizedCommitsToKeep">0</str> -->
-    <!--
-        Delete all commit points once they have reached the given age.
-        Supports DateMathParser syntax e.g.
-      -->
-    <!--
-       <str name="maxCommitAge">30MINUTES</str>
-       <str name="maxCommitAge">1DAY</str>
-    -->
-    <!--
-    </deletionPolicy>
-    -->
-
-    <!-- Lucene Infostream
-
-         To aid in advanced debugging, Lucene provides an "InfoStream"
-         of detailed information when indexing.
-
-         Setting The value to true will instruct the underlying Lucene
-         IndexWriter to write its debugging info the specified file
-      -->
-    <!-- <infoStream file="INFOSTREAM.txt">false</infoStream> -->
-  </indexConfig>
-
-
-  <!-- JMX
-
-       This example enables JMX if and only if an existing MBeanServer
-       is found, use this if you want to configure JMX through JVM
-       parameters. Remove this to disable exposing Solr configuration
-       and statistics to JMX.
-
-       For more details see http://wiki.apache.org/solr/SolrJmx
-    -->
-  <jmx />
-  <!-- If you want to connect to a particular server, specify the
-       agentId
-    -->
-  <!-- <jmx agentId="myAgent" /> -->
-  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
-  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
-    -->
-
-  <!-- The default high-performance update handler -->
-  <updateHandler class="solr.DirectUpdateHandler2">
-
-    <!-- Enables a transaction log, used for real-time get, durability, and
-         and solr cloud replica recovery.  The log can grow as big as
-         uncommitted changes to the index, so use of a hard autoCommit
-         is recommended (see below).
-         "dir" - the target directory for transaction logs, defaults to the
-                solr data directory.
-         "numVersionBuckets" - sets the number of buckets used to keep
-                track of max version values when checking for re-ordered
-                updates; increase this value to reduce the cost of
-                synchronizing access to version buckets during high-volume
-                indexing, this requires 8 bytes (long) * numVersionBuckets
-                of heap space per Solr core.
-    -->
-    <updateLog>
-      <str name="dir">${solr.ulog.dir:}</str>
-      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
-    </updateLog>
-
-    <!-- AutoCommit
-
-         Perform a hard commit automatically under certain conditions.
-         Instead of enabling autoCommit, consider using "commitWithin"
-         when adding documents.
-
-         http://wiki.apache.org/solr/UpdateXmlMessages
-
-         maxDocs - Maximum number of documents to add since the last
-                   commit before automatically triggering a new commit.
-
-         maxTime - Maximum amount of time in ms that is allowed to pass
-                   since a document was added before automatically
-                   triggering a new commit.
-         openSearcher - if false, the commit causes recent index changes
-           to be flushed to stable storage, but does not cause a new
-           searcher to be opened to make those changes visible.
-
-         If the updateLog is enabled, then it's highly recommended to
-         have some sort of hard autoCommit to limit the log size.
-      -->
-    <autoCommit>
-      <maxTime>${solr.autoCommit.maxTime:15000}</maxTime>
-      <openSearcher>false</openSearcher>
-    </autoCommit>
-
-    <!-- softAutoCommit is like autoCommit except it causes a
-         'soft' commit which only ensures that changes are visible
-         but does not ensure that data is synced to disk.  This is
-         faster and more near-realtime friendly than a hard commit.
-      -->
-
-    <autoSoftCommit>
-      <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime>
-    </autoSoftCommit>
-
-    <!-- Update Related Event Listeners
-
-         Various IndexWriter related events can trigger Listeners to
-         take actions.
-
-         postCommit - fired after every commit or optimize command
-         postOptimize - fired after every optimize command
-      -->
-
-  </updateHandler>
-
-  <!-- IndexReaderFactory
-
-       Use the following format to specify a custom IndexReaderFactory,
-       which allows for alternate IndexReader implementations.
-
-       ** Experimental Feature **
-
-       Please note - Using a custom IndexReaderFactory may prevent
-       certain other features from working. The API to
-       IndexReaderFactory may change without warning or may even be
-       removed from future releases if the problems cannot be
-       resolved.
-
-
-       ** Features that may not work with custom IndexReaderFactory **
-
-       The ReplicationHandler assumes a disk-resident index. Using a
-       custom IndexReader implementation may cause incompatibility
-       with ReplicationHandler and may cause replication to not work
-       correctly. See SOLR-1366 for details.
-
-    -->
-  <!--
-  <indexReaderFactory name="IndexReaderFactory" class="package.class">
-    <str name="someArg">Some Value</str>
-  </indexReaderFactory >
-  -->
-
-  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       Query section - these settings control query time things like caches
-       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
-  <query>
-
-    <!-- Maximum number of clauses in each BooleanQuery,  an exception
-         is thrown if exceeded.  It is safe to increase or remove this setting,
-         since it is purely an arbitrary limit to try and catch user errors where
-         large boolean queries may not be the best implementation choice.
-      -->
-    <maxBooleanClauses>1024</maxBooleanClauses>
-
-    <!-- Solr Internal Query Caches
-
-         There are two implementations of cache available for Solr,
-         LRUCache, based on a synchronized LinkedHashMap, and
-         FastLRUCache, based on a ConcurrentHashMap.
-
-         FastLRUCache has faster gets and slower puts in single
-         threaded operation and thus is generally faster than LRUCache
-         when the hit ratio of the cache is high (> 75%), and may be
-         faster under other scenarios on multi-cpu systems.
-    -->
-
-    <!-- Filter Cache
-
-         Cache used by SolrIndexSearcher for filters (DocSets),
-         unordered sets of *all* documents that match a query.  When a
-         new searcher is opened, its caches may be prepopulated or
-         "autowarmed" using data from caches in the old searcher.
-         autowarmCount is the number of items to prepopulate.  For
-         LRUCache, the autowarmed items will be the most recently
-         accessed items.
-
-         Parameters:
-           class - the SolrCache implementation LRUCache or
-               (LRUCache or FastLRUCache)
-           size - the maximum number of entries in the cache
-           initialSize - the initial capacity (number of entries) of
-               the cache.  (see java.util.HashMap)
-           autowarmCount - the number of entries to prepopulate from
-               and old cache.
-           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
-                      to occupy. Note that when this option is specified, the size
-                      and initialSize parameters are ignored.
-      -->
-    <filterCache class="solr.search.CaffeineCache"
-                 size="512"
-                 initialSize="512"
-                 autowarmCount="0"/>
-
-    <!-- Query Result Cache
-
-         Caches results of searches - ordered lists of document ids
-         (DocList) based on a query, a sort, and the range of documents requested.
-         Additional supported parameter by LRUCache:
-            maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
-                       to occupy
-      -->
-    <queryResultCache class="solr.search.CaffeineCache"
-                      size="512"
-                      initialSize="512"
-                      autowarmCount="0"/>
-
-    <!-- Document Cache
-
-         Caches Lucene Document objects (the stored fields for each
-         document).  Since Lucene internal document ids are transient,
-         this cache will not be autowarmed.
-      -->
-    <documentCache class="solr.search.CaffeineCache"
-                   size="512"
-                   initialSize="512"
-                   autowarmCount="0"/>
-
-    <!-- custom cache currently used by block join -->
-    <cache name="perSegFilter"
-           class="solr.search.CaffeineCache"
-           size="10"
-           initialSize="0"
-           autowarmCount="10"
-           regenerator="solr.NoOpRegenerator" />
-
-    <!-- Field Value Cache
-
-         Cache used to hold field values that are quickly accessible
-         by document id.  The fieldValueCache is created by default
-         even if not configured here.
-      -->
-    <!--
-       <fieldValueCache class="solr.FastLRUCache"
-                        size="512"
-                        autowarmCount="128"
-                        showItems="32" />
-      -->
-
-    <!-- Custom Cache
-
-         Example of a generic cache.  These caches may be accessed by
-         name through SolrIndexSearcher.getCache(),cacheLookup(), and
-         cacheInsert().  The purpose is to enable easy caching of
-         user/application level data.  The regenerator argument should
-         be specified as an implementation of solr.CacheRegenerator
-         if autowarming is desired.
-      -->
-    <!--
-       <cache name="myUserCache"
-              class="solr.LRUCache"
-              size="4096"
-              initialSize="1024"
-              autowarmCount="1024"
-              regenerator="com.mycompany.MyRegenerator"
-              />
-      -->
-
-
-    <!-- Lazy Field Loading
-
-         If true, stored fields that are not requested will be loaded
-         lazily.  This can result in a significant speed improvement
-         if the usual case is to not load all stored fields,
-         especially if the skipped fields are large compressed text
-         fields.
-    -->
-    <enableLazyFieldLoading>true</enableLazyFieldLoading>
-
-    <!-- Use Filter For Sorted Query
-
-         A possible optimization that attempts to use a filter to
-         satisfy a search.  If the requested sort does not include
-         score, then the filterCache will be checked for a filter
-         matching the query. If found, the filter will be used as the
-         source of document ids, and then the sort will be applied to
-         that.
-
-         For most situations, this will not be useful unless you
-         frequently get the same search repeatedly with different sort
-         options, and none of them ever use "score"
-      -->
-    <!--
-       <useFilterForSortedQuery>true</useFilterForSortedQuery>
-      -->
-
-    <!-- Result Window Size
-
-         An optimization for use with the queryResultCache.  When a search
-         is requested, a superset of the requested number of document ids
-         are collected.  For example, if a search for a particular query
-         requests matching documents 10 through 19, and queryWindowSize is 50,
-         then documents 0 through 49 will be collected and cached.  Any further
-         requests in that range can be satisfied via the cache.
-      -->
-    <queryResultWindowSize>20</queryResultWindowSize>
-
-    <!-- Maximum number of documents to cache for any entry in the
-         queryResultCache.
-      -->
-    <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
-
-    <!-- Query Related Event Listeners
-
-         Various IndexSearcher related events can trigger Listeners to
-         take actions.
-
-         newSearcher - fired whenever a new searcher is being prepared
-         and there is a current searcher handling requests (aka
-         registered).  It can be used to prime certain caches to
-         prevent long request times for certain requests.
-
-         firstSearcher - fired whenever a new searcher is being
-         prepared but there is no current registered searcher to handle
-         requests or to gain autowarming data from.
-
-
-      -->
-    <!-- QuerySenderListener takes an array of NamedList and executes a
-         local query request for each NamedList in sequence.
-      -->
-    <listener event="newSearcher" class="solr.QuerySenderListener">
-      <arr name="queries">
-        <!--
-           <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
-           <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
-          -->
-      </arr>
-    </listener>
-    <listener event="firstSearcher" class="solr.QuerySenderListener">
-      <arr name="queries">
-        <!--
-        <lst>
-          <str name="q">static firstSearcher warming in solrconfig.xml</str>
-        </lst>
-        -->
-      </arr>
-    </listener>
-
-    <!-- Use Cold Searcher
-
-         If a search request comes in and there is no current
-         registered searcher, then immediately register the still
-         warming searcher and use it.  If "false" then all requests
-         will block until the first searcher is done warming.
-      -->
-    <useColdSearcher>false</useColdSearcher>
-
-  </query>
-
-
-  <!-- Request Dispatcher
-
-       This section contains instructions for how the SolrDispatchFilter
-       should behave when processing requests for this SolrCore.
-
-    -->
-  <requestDispatcher>
-    <!-- Request Parsing
-
-         These settings indicate how Solr Requests may be parsed, and
-         what restrictions may be placed on the ContentStreams from
-         those requests
-
-         enableRemoteStreaming - enables use of the stream.file
-         and stream.url parameters for specifying remote streams.
-
-         multipartUploadLimitInKB - specifies the max size (in KiB) of
-         Multipart File Uploads that Solr will allow in a Request.
-
-         formdataUploadLimitInKB - specifies the max size (in KiB) of
-         form data (application/x-www-form-urlencoded) sent via
-         POST. You can use POST to pass request parameters not
-         fitting into the URL.
-
-         addHttpRequestToContext - if set to true, it will instruct
-         the requestParsers to include the original HttpServletRequest
-         object in the context map of the SolrQueryRequest under the
-         key "httpRequest". It will not be used by any of the existing
-         Solr components, but may be useful when developing custom
-         plugins.
-
-         *** WARNING ***
-         Before enabling remote streaming, you should make sure your
-         system has authentication enabled.
-
-    <requestParsers enableRemoteStreaming="false"
-                    multipartUploadLimitInKB="-1"
-                    formdataUploadLimitInKB="-1"
-                    addHttpRequestToContext="false"/>
-      -->
-
-    <!-- HTTP Caching
-
-         Set HTTP caching related parameters (for proxy caches and clients).
-
-         The options below instruct Solr not to output any HTTP Caching
-         related headers
-      -->
-    <httpCaching never304="true" />
-    <!-- If you include a <cacheControl> directive, it will be used to
-         generate a Cache-Control header (as well as an Expires header
-         if the value contains "max-age=")
-
-         By default, no Cache-Control header is generated.
-
-         You can use the <cacheControl> option even if you have set
-         never304="true"
-      -->
-    <!--
-       <httpCaching never304="true" >
-         <cacheControl>max-age=30, public</cacheControl>
-       </httpCaching>
-      -->
-    <!-- To enable Solr to respond with automatically generated HTTP
-         Caching headers, and to response to Cache Validation requests
-         correctly, set the value of never304="false"
-
-         This will cause Solr to generate Last-Modified and ETag
-         headers based on the properties of the Index.
-
-         The following options can also be specified to affect the
-         values of these headers...
-
-         lastModFrom - the default value is "openTime" which means the
-         Last-Modified value (and validation against If-Modified-Since
-         requests) will all be relative to when the current Searcher
-         was opened.  You can change it to lastModFrom="dirLastMod" if
-         you want the value to exactly correspond to when the physical
-         index was last modified.
-
-         etagSeed="..." is an option you can change to force the ETag
-         header (and validation against If-None-Match requests) to be
-         different even if the index has not changed (ie: when making
-         significant changes to your config file)
-
-         (lastModifiedFrom and etagSeed are both ignored if you use
-         the never304="true" option)
-      -->
-    <!--
-       <httpCaching lastModifiedFrom="openTime"
-                    etagSeed="Solr">
-         <cacheControl>max-age=30, public</cacheControl>
-       </httpCaching>
-      -->
-  </requestDispatcher>
-
-  <!-- Request Handlers
-
-       http://wiki.apache.org/solr/SolrRequestHandler
-
-       Incoming queries will be dispatched to a specific handler by name
-       based on the path specified in the request.
-
-       If a Request Handler is declared with startup="lazy", then it will
-       not be initialized until the first request that uses it.
-
-    -->
-  <!-- SearchHandler
-
-       http://wiki.apache.org/solr/SearchHandler
-
-       For processing Search Queries, the primary Request Handler
-       provided with Solr is "SearchHandler" It delegates to a sequent
-       of SearchComponents (see below) and supports distributed
-       queries across multiple shards
-    -->
-  <requestHandler name="/select" class="solr.SearchHandler">
-    <!-- default values for query parameters can be specified, these
-         will be overridden by parameters in the request
-      -->
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-      <int name="rows">10</int>
-      <str name="defType">edismax</str>
-      <float name="tie">0.075</float>
-        <str name="qf">
-            dvName^400
-            authorName^180
-            dvSubject^190
-            dvDescription^180
-            dvAffiliation^170
-            title^130
-            subject^120
-            keyword^110
-            topicClassValue^100
-            dsDescriptionValue^90
-            authorAffiliation^80
-            publicationCitation^60
-            producerName^50
-            fileName^30
-            fileDescription^30
-            variableLabel^20
-            variableName^10
-            _text_^1.0
-        </str>
-        <str name="pf">
-            dvName^200
-            authorName^100
-            dvSubject^100
-            dvDescription^100
-            dvAffiliation^100
-            title^75
-            subject^75
-            keyword^75
-            topicClassValue^75
-            dsDescriptionValue^75
-            authorAffiliation^75
-            publicationCitation^75
-            producerName^75
-        </str>
-        <!-- Even though this number is huge it only seems to apply a boost of ~1.5x to final result -MAD 4.9.3--> 
-        <str name="bq">
-            isHarvested:false^25000
-        </str>
-
-      <!-- Default search field
-         <str name="df">text</str> 
-        -->
-      <!-- Change from JSON to XML format (the default prior to Solr 7.0)
-         <str name="wt">xml</str> 
-        -->
-    </lst>
-    <!-- In addition to defaults, "appends" params can be specified
-         to identify values which should be appended to the list of
-         multi-val params from the query (or the existing "defaults").
-      -->
-    <!-- In this example, the param "fq=instock:true" would be appended to
-         any query time fq params the user may specify, as a mechanism for
-         partitioning the index, independent of any user selected filtering
-         that may also be desired (perhaps as a result of faceted searching).
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "appends" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
-    <!--
-       <lst name="appends">
-         <str name="fq">inStock:true</str>
-       </lst>
-      -->
-    <!-- "invariants" are a way of letting the Solr maintainer lock down
-         the options available to Solr clients.  Any params values
-         specified here are used regardless of what values may be specified
-         in either the query, the "defaults", or the "appends" params.
-
-         In this example, the facet.field and facet.query params would
-         be fixed, limiting the facets clients can use.  Faceting is
-         not turned on by default - but if the client does specify
-         facet=true in the request, these are the only facets they
-         will be able to see counts for; regardless of what other
-         facet.field or facet.query params they may specify.
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "invariants" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
-    <!--
-       <lst name="invariants">
-         <str name="facet.field">cat</str>
-         <str name="facet.field">manu_exact</str>
-         <str name="facet.query">price:[* TO 500]</str>
-         <str name="facet.query">price:[500 TO *]</str>
-       </lst>
-      -->
-    <!-- If the default list of SearchComponents is not desired, that
-         list can either be overridden completely, or components can be
-         prepended or appended to the default list.  (see below)
-      -->
-    <!--
-       <arr name="components">
-         <str>nameOfCustomComponent1</str>
-         <str>nameOfCustomComponent2</str>
-       </arr>
-      -->
-  </requestHandler>
-
-  <!-- A request handler that returns indented JSON by default -->
-  <requestHandler name="/query" class="solr.SearchHandler">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-      <str name="wt">json</str>
-      <str name="indent">true</str>
-    </lst>
-  </requestHandler>
-
-
-  <!-- A Robust Example
-
-       This example SearchHandler declaration shows off usage of the
-       SearchHandler with many defaults declared
-
-       Note that multiple instances of the same Request Handler
-       (SearchHandler) can be registered multiple times with different
-       names (and different init parameters)
-    -->
-  <requestHandler name="/browse" class="solr.SearchHandler" useParams="query,facets,velocity,browse">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-    </lst>
-  </requestHandler>
-
-  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell,/browse">
-    <lst name="defaults">
-      <str name="df">_text_</str>
-    </lst>
-  </initParams>
-
-  <!-- Solr Cell Update Request Handler
-
-       http://wiki.apache.org/solr/ExtractingRequestHandler
-
-    -->
-  <requestHandler name="/update/extract"
-                  startup="lazy"
-                  class="solr.extraction.ExtractingRequestHandler" >
-    <lst name="defaults">
-      <str name="lowernames">true</str>
-      <str name="fmap.meta">ignored_</str>
-      <str name="fmap.content">_text_</str>
-    </lst>
-  </requestHandler>
-
-  <!-- Search Components
-
-       Search components are registered to SolrCore and used by
-       instances of SearchHandler (which can access them by name)
-
-       By default, the following components are available:
-
-       <searchComponent name="query"     class="solr.QueryComponent" />
-       <searchComponent name="facet"     class="solr.FacetComponent" />
-       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
-       <searchComponent name="highlight" class="solr.HighlightComponent" />
-       <searchComponent name="stats"     class="solr.StatsComponent" />
-       <searchComponent name="debug"     class="solr.DebugComponent" />
-
-       Default configuration in a requestHandler would look like:
-
-       <arr name="components">
-         <str>query</str>
-         <str>facet</str>
-         <str>mlt</str>
-         <str>highlight</str>
-         <str>stats</str>
-         <str>debug</str>
-       </arr>
-
-       If you register a searchComponent to one of the standard names,
-       that will be used instead of the default.
-
-       To insert components before or after the 'standard' components, use:
-
-       <arr name="first-components">
-         <str>myFirstComponentName</str>
-       </arr>
-
-       <arr name="last-components">
-         <str>myLastComponentName</str>
-       </arr>
-
-       NOTE: The component registered with the name "debug" will
-       always be executed after the "last-components"
-
-     -->
-
-  <!-- Spell Check
-
-       The spell check component can return a list of alternative spelling
-       suggestions.
-
-       http://wiki.apache.org/solr/SpellCheckComponent
-    -->
-  <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
-
-    <str name="queryAnalyzerFieldType">text_general</str>
-
-    <!-- Multiple "Spell Checkers" can be declared and used by this
-         component
-      -->
-
-    <!-- a spellchecker built from a field of the main index -->
-    <lst name="spellchecker">
-      <str name="name">default</str>
-      <str name="field">_text_</str>
-      <str name="classname">solr.DirectSolrSpellChecker</str>
-      <!-- the spellcheck distance measure used, the default is the internal levenshtein -->
-      <str name="distanceMeasure">internal</str>
-      <!-- minimum accuracy needed to be considered a valid spellcheck suggestion -->
-      <float name="accuracy">0.5</float>
-      <!-- the maximum #edits we consider when enumerating terms: can be 1 or 2 -->
-      <int name="maxEdits">2</int>
-      <!-- the minimum shared prefix when enumerating terms -->
-      <int name="minPrefix">1</int>
-      <!-- maximum number of inspections per result. -->
-      <int name="maxInspections">5</int>
-      <!-- minimum length of a query term to be considered for correction -->
-      <int name="minQueryLength">4</int>
-      <!-- maximum threshold of documents a query term can appear to be considered for correction -->
-      <float name="maxQueryFrequency">0.01</float>
-      <!-- uncomment this to require suggestions to occur in 1% of the documents
-        <float name="thresholdTokenFrequency">.01</float>
-      -->
-    </lst>
-
-    <!-- a spellchecker that can break or combine words.  See "/spell" handler below for usage -->
-    <!--
-    <lst name="spellchecker">
-      <str name="name">wordbreak</str>
-      <str name="classname">solr.WordBreakSolrSpellChecker</str>
-      <str name="field">name</str>
-      <str name="combineWords">true</str>
-      <str name="breakWords">true</str>
-      <int name="maxChanges">10</int>
-    </lst>
-    -->
-  </searchComponent>
-
-  <!-- A request handler for demonstrating the spellcheck component.
-
-       NOTE: This is purely as an example.  The whole purpose of the
-       SpellCheckComponent is to hook it into the request handler that
-       handles your normal user queries so that a separate request is
-       not needed to get suggestions.
-
-       IN OTHER WORDS, THERE IS REALLY GOOD CHANCE THE SETUP BELOW IS
-       NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
-
-       See http://wiki.apache.org/solr/SpellCheckComponent for details
-       on the request parameters.
-    -->
-  <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <!-- Solr will use suggestions from both the 'default' spellchecker
-           and from the 'wordbreak' spellchecker and combine them.
-           collations (re-written queries) can include a combination of
-           corrections from both spellcheckers -->
-      <str name="spellcheck.dictionary">default</str>
-      <str name="spellcheck">on</str>
-      <str name="spellcheck.extendedResults">true</str>
-      <str name="spellcheck.count">10</str>
-      <str name="spellcheck.alternativeTermCount">5</str>
-      <str name="spellcheck.maxResultsForSuggest">5</str>
-      <str name="spellcheck.collate">true</str>
-      <str name="spellcheck.collateExtendedResults">true</str>
-      <str name="spellcheck.maxCollationTries">10</str>
-      <str name="spellcheck.maxCollations">5</str>
-    </lst>
-    <arr name="last-components">
-      <str>spellcheck</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Term Vector Component
-
-       http://wiki.apache.org/solr/TermVectorComponent
-    -->
-  <searchComponent name="tvComponent" class="solr.TermVectorComponent"/>
-
-  <!-- A request handler for demonstrating the term vector component
-
-       This is purely as an example.
-
-       In reality you will likely want to add the component to your
-       already specified request handlers.
-    -->
-  <requestHandler name="/tvrh" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <bool name="tv">true</bool>
-    </lst>
-    <arr name="last-components">
-      <str>tvComponent</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Clustering Component. (Omitted here. See the default Solr example for a typical configuration.) -->
-
-  <!-- Terms Component
-
-       http://wiki.apache.org/solr/TermsComponent
-
-       A component to return terms and document frequency of those
-       terms
-    -->
-  <searchComponent name="terms" class="solr.TermsComponent"/>
-
-  <!-- A request handler for demonstrating the terms component -->
-  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <bool name="terms">true</bool>
-      <bool name="distrib">false</bool>
-    </lst>
-    <arr name="components">
-      <str>terms</str>
-    </arr>
-  </requestHandler>
-
-
-  <!-- Query Elevation Component
-
-       http://wiki.apache.org/solr/QueryElevationComponent
-
-       a search component that enables you to configure the top
-       results for a given query regardless of the normal lucene
-       scoring.
-    -->
-  <searchComponent name="elevator" class="solr.QueryElevationComponent" >
-    <!-- pick a fieldType to analyze queries -->
-    <str name="queryFieldType">string</str>
-  </searchComponent>
-
-  <!-- A request handler for demonstrating the elevator component -->
-  <requestHandler name="/elevate" class="solr.SearchHandler" startup="lazy">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-    </lst>
-    <arr name="last-components">
-      <str>elevator</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Highlighting Component
-
-       http://wiki.apache.org/solr/HighlightingParameters
-    -->
-  <searchComponent class="solr.HighlightComponent" name="highlight">
-    <highlighting>
-      <!-- Configure the standard fragmenter -->
-      <!-- This could most likely be commented out in the "default" case -->
-      <fragmenter name="gap"
-                  default="true"
-                  class="solr.highlight.GapFragmenter">
-        <lst name="defaults">
-          <int name="hl.fragsize">100</int>
-        </lst>
-      </fragmenter>
-
-      <!-- A regular-expression-based fragmenter
-           (for sentence extraction)
-        -->
-      <fragmenter name="regex"
-                  class="solr.highlight.RegexFragmenter">
-        <lst name="defaults">
-          <!-- slightly smaller fragsizes work better because of slop -->
-          <int name="hl.fragsize">70</int>
-          <!-- allow 50% slop on fragment sizes -->
-          <float name="hl.regex.slop">0.5</float>
-          <!-- a basic sentence pattern -->
-          <str name="hl.regex.pattern">[-\w ,/\n\&quot;&apos;]{20,200}</str>
-        </lst>
-      </fragmenter>
-
-      <!-- Configure the standard formatter -->
-      <formatter name="html"
-                 default="true"
-                 class="solr.highlight.HtmlFormatter">
-        <lst name="defaults">
-          <str name="hl.simple.pre"><![CDATA[<em>]]></str>
-          <str name="hl.simple.post"><![CDATA[</em>]]></str>
-        </lst>
-      </formatter>
-
-      <!-- Configure the standard encoder -->
-      <encoder name="html"
-               class="solr.highlight.HtmlEncoder" />
-
-      <!-- Configure the standard fragListBuilder -->
-      <fragListBuilder name="simple"
-                       class="solr.highlight.SimpleFragListBuilder"/>
-
-      <!-- Configure the single fragListBuilder -->
-      <fragListBuilder name="single"
-                       class="solr.highlight.SingleFragListBuilder"/>
-
-      <!-- Configure the weighted fragListBuilder -->
-      <fragListBuilder name="weighted"
-                       default="true"
-                       class="solr.highlight.WeightedFragListBuilder"/>
-
-      <!-- default tag FragmentsBuilder -->
-      <fragmentsBuilder name="default"
-                        default="true"
-                        class="solr.highlight.ScoreOrderFragmentsBuilder">
-        <!--
-        <lst name="defaults">
-          <str name="hl.multiValuedSeparatorChar">/</str>
-        </lst>
-        -->
-      </fragmentsBuilder>
-
-      <!-- multi-colored tag FragmentsBuilder -->
-      <fragmentsBuilder name="colored"
-                        class="solr.highlight.ScoreOrderFragmentsBuilder">
-        <lst name="defaults">
-          <str name="hl.tag.pre"><![CDATA[
-               <b style="background:yellow">,<b style="background:lawgreen">,
-               <b style="background:aquamarine">,<b style="background:magenta">,
-               <b style="background:palegreen">,<b style="background:coral">,
-               <b style="background:wheat">,<b style="background:khaki">,
-               <b style="background:lime">,<b style="background:deepskyblue">]]></str>
-          <str name="hl.tag.post"><![CDATA[</b>]]></str>
-        </lst>
-      </fragmentsBuilder>
-
-      <boundaryScanner name="default"
-                       default="true"
-                       class="solr.highlight.SimpleBoundaryScanner">
-        <lst name="defaults">
-          <str name="hl.bs.maxScan">10</str>
-          <str name="hl.bs.chars">.,!? &#9;&#10;&#13;</str>
-        </lst>
-      </boundaryScanner>
-
-      <boundaryScanner name="breakIterator"
-                       class="solr.highlight.BreakIteratorBoundaryScanner">
-        <lst name="defaults">
-          <!-- type should be one of CHARACTER, WORD(default), LINE and SENTENCE -->
-          <str name="hl.bs.type">WORD</str>
-          <!-- language and country are used when constructing Locale object.  -->
-          <!-- And the Locale object will be used when getting instance of BreakIterator -->
-          <str name="hl.bs.language">en</str>
-          <str name="hl.bs.country">US</str>
-        </lst>
-      </boundaryScanner>
-    </highlighting>
-  </searchComponent>
-
-  <!-- Update Processors
-
-       Chains of Update Processor Factories for dealing with Update
-       Requests can be declared, and then used by name in Update
-       Request Processors
-
-       http://wiki.apache.org/solr/UpdateRequestProcessor
-
-    -->
-
-  <!-- Add unknown fields to the schema
-
-       Field type guessing update processors that will
-       attempt to parse string-typed field values as Booleans, Longs,
-       Doubles, or Dates, and then add schema fields with the guessed
-       field types. Text content will be indexed as "text_general" as
-       well as a copy to a plain string version in *_str.
-
-       These require that the schema is both managed and mutable, by
-       declaring schemaFactory as ManagedIndexSchemaFactory, with
-       mutable specified as true.
-
-       See http://wiki.apache.org/solr/GuessingFieldTypes
-    -->
-<schemaFactory class="ClassicIndexSchemaFactory"/>
-
-  <updateProcessor class="solr.UUIDUpdateProcessorFactory" name="uuid"/>
-  <updateProcessor class="solr.RemoveBlankFieldUpdateProcessorFactory" name="remove-blank"/>
-  <updateProcessor class="solr.FieldNameMutatingUpdateProcessorFactory" name="field-name-mutating">
-    <str name="pattern">[^\w-\.]</str>
-    <str name="replacement">_</str>
-  </updateProcessor>
-  <updateProcessor class="solr.ParseBooleanFieldUpdateProcessorFactory" name="parse-boolean"/>
-  <updateProcessor class="solr.ParseLongFieldUpdateProcessorFactory" name="parse-long"/>
-  <updateProcessor class="solr.ParseDoubleFieldUpdateProcessorFactory" name="parse-double"/>
-  <updateProcessor class="solr.ParseDateFieldUpdateProcessorFactory" name="parse-date">
-    <arr name="format">
-      <str>yyyy-MM-dd'T'HH:mm:ss.SSSZ</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss,SSSZ</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss.SSS</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss,SSS</str>
-      <str>yyyy-MM-dd'T'HH:mm:ssZ</str>
-      <str>yyyy-MM-dd'T'HH:mm:ss</str>
-      <str>yyyy-MM-dd'T'HH:mmZ</str>
-      <str>yyyy-MM-dd'T'HH:mm</str>
-      <str>yyyy-MM-dd HH:mm:ss.SSSZ</str>
-      <str>yyyy-MM-dd HH:mm:ss,SSSZ</str>
-      <str>yyyy-MM-dd HH:mm:ss.SSS</str>
-      <str>yyyy-MM-dd HH:mm:ss,SSS</str>
-      <str>yyyy-MM-dd HH:mm:ssZ</str>
-      <str>yyyy-MM-dd HH:mm:ss</str>
-      <str>yyyy-MM-dd HH:mmZ</str>
-      <str>yyyy-MM-dd HH:mm</str>
-      <str>yyyy-MM-dd</str>
-    </arr>
-  </updateProcessor>
-
-  <!--Dataverse removed-->
-<!--  <updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.String</str>
-      <str name="fieldType">text_general</str>
-      <lst name="copyField">
-        <str name="dest">*_str</str>
-        <int name="maxChars">256</int>
-      </lst>
-
-      <bool name="default">true</bool>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.Boolean</str>
-      <str name="fieldType">booleans</str>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.util.Date</str>
-      <str name="fieldType">pdates</str>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.Long</str>
-      <str name="valueClass">java.lang.Integer</str>
-      <str name="fieldType">plongs</str>
-    </lst>
-    <lst name="typeMapping">
-      <str name="valueClass">java.lang.Number</str>
-      <str name="fieldType">pdoubles</str>
-    </lst>
-    </updateProcessor> -->
-
-  <!-- The update.autoCreateFields property can be turned to false to disable schemaless mode -->
-  <updateRequestProcessorChain name="add-unknown-fields-to-the-schema" default="${update.autoCreateFields:false}"
-           processor="uuid,remove-blank,field-name-mutating,parse-boolean,parse-long,parse-double,parse-date">
-    <processor class="solr.LogUpdateProcessorFactory"/>
-    <processor class="solr.DistributedUpdateProcessorFactory"/>
-    <processor class="solr.RunUpdateProcessorFactory"/>
-  </updateRequestProcessorChain>
-
-  <!-- Deduplication
-
-       An example dedup update processor that creates the "id" field
-       on the fly based on the hash code of some other fields.  This
-       example has overwriteDupes set to false since we are using the
-       id field as the signatureField and Solr will maintain
-       uniqueness based on that anyway.
-
-    -->
-  <!--
-     <updateRequestProcessorChain name="dedupe">
-       <processor class="solr.processor.SignatureUpdateProcessorFactory">
-         <bool name="enabled">true</bool>
-         <str name="signatureField">id</str>
-         <bool name="overwriteDupes">false</bool>
-         <str name="fields">name,features,cat</str>
-         <str name="signatureClass">solr.processor.Lookup3Signature</str>
-       </processor>
-       <processor class="solr.LogUpdateProcessorFactory" />
-       <processor class="solr.RunUpdateProcessorFactory" />
-     </updateRequestProcessorChain>
-    -->
-
-  <!-- Language identification
-
-       This example update chain identifies the language of the incoming
-       documents using the langid contrib. The detected language is
-       written to field language_s. No field name mapping is done.
-       The fields used for detection are text, title, subject and description,
-       making this example suitable for detecting languages form full-text
-       rich documents injected via ExtractingRequestHandler.
-       See more about langId at http://wiki.apache.org/solr/LanguageDetection
-    -->
-  <!--
-   <updateRequestProcessorChain name="langid">
-     <processor class="org.apache.solr.update.processor.TikaLanguageIdentifierUpdateProcessorFactory">
-       <str name="langid.fl">text,title,subject,description</str>
-       <str name="langid.langField">language_s</str>
-       <str name="langid.fallback">en</str>
-     </processor>
-     <processor class="solr.LogUpdateProcessorFactory" />
-     <processor class="solr.RunUpdateProcessorFactory" />
-   </updateRequestProcessorChain>
-  -->
-
-  <!-- Script update processor
-
-    This example hooks in an update processor implemented using JavaScript.
-
-    See more about the script update processor at http://wiki.apache.org/solr/ScriptUpdateProcessor
-  -->
-  <!--
-    <updateRequestProcessorChain name="script">
-      <processor class="solr.StatelessScriptUpdateProcessorFactory">
-        <str name="script">update-script.js</str>
-        <lst name="params">
-          <str name="config_param">example config parameter</str>
-        </lst>
-      </processor>
-      <processor class="solr.RunUpdateProcessorFactory" />
-    </updateRequestProcessorChain>
-  -->
-
-  <!-- Response Writers
-
-       http://wiki.apache.org/solr/QueryResponseWriter
-
-       Request responses will be written using the writer specified by
-       the 'wt' request parameter matching the name of a registered
-       writer.
-
-       The "default" writer is the default and will be used if 'wt' is
-       not specified in the request.
-    -->
-  <!-- The following response writers are implicitly configured unless
-       overridden...
-    -->
-  <!--
-     <queryResponseWriter name="xml"
-                          default="true"
-                          class="solr.XMLResponseWriter" />
-     <queryResponseWriter name="json" class="solr.JSONResponseWriter"/>
-     <queryResponseWriter name="python" class="solr.PythonResponseWriter"/>
-     <queryResponseWriter name="ruby" class="solr.RubyResponseWriter"/>
-     <queryResponseWriter name="php" class="solr.PHPResponseWriter"/>
-     <queryResponseWriter name="phps" class="solr.PHPSerializedResponseWriter"/>
-     <queryResponseWriter name="csv" class="solr.CSVResponseWriter"/>
-     <queryResponseWriter name="schema.xml" class="solr.SchemaXmlResponseWriter"/>
-    -->
-
-  <queryResponseWriter name="json" class="solr.JSONResponseWriter">
-    <!-- For the purposes of the tutorial, JSON responses are written as
-     plain text so that they are easy to read in *any* browser.
-     If you expect a MIME type of "application/json" just remove this override.
-    -->
-    <str name="content-type">text/plain; charset=UTF-8</str>
-  </queryResponseWriter>
-
-  <!--
-     Custom response writers can be declared as needed...
-    -->
-  <queryResponseWriter name="velocity" class="solr.VelocityResponseWriter" startup="lazy">
-    <str name="template.base.dir">${velocity.template.base.dir:}</str>
-    <str name="solr.resource.loader.enabled">${velocity.solr.resource.loader.enabled:true}</str>
-    <str name="params.resource.loader.enabled">${velocity.params.resource.loader.enabled:false}</str>
-  </queryResponseWriter>
-
-  <!-- XSLT response writer transforms the XML output by any xslt file found
-       in Solr's conf/xslt directory.  Changes to xslt files are checked for
-       every xsltCacheLifetimeSeconds.
-    -->
-  <queryResponseWriter name="xslt" class="solr.XSLTResponseWriter">
-    <int name="xsltCacheLifetimeSeconds">5</int>
-  </queryResponseWriter>
-
-  <!-- Query Parsers
-
-       https://lucene.apache.org/solr/guide/query-syntax-and-parsing.html
-
-       Multiple QParserPlugins can be registered by name, and then
-       used in either the "defType" param for the QueryComponent (used
-       by SearchHandler) or in LocalParams
-    -->
-  <!-- example of registering a query parser -->
-  <!--
-     <queryParser name="myparser" class="com.mycompany.MyQParserPlugin"/>
-    -->
-
-  <!-- Function Parsers
-
-       http://wiki.apache.org/solr/FunctionQuery
-
-       Multiple ValueSourceParsers can be registered by name, and then
-       used as function names when using the "func" QParser.
-    -->
-  <!-- example of registering a custom function parser  -->
-  <!--
-     <valueSourceParser name="myfunc"
-                        class="com.mycompany.MyValueSourceParser" />
-    -->
-
-
-  <!-- Document Transformers
-       http://wiki.apache.org/solr/DocTransformers
-    -->
-  <!--
-     Could be something like:
-     <transformer name="db" class="com.mycompany.LoadFromDatabaseTransformer" >
-       <int name="connection">jdbc://....</int>
-     </transformer>
-
-     To add a constant value to all docs, use:
-     <transformer name="mytrans2" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
-       <int name="value">5</int>
-     </transformer>
-
-     If you want the user to still be able to change it with _value:something_ use this:
-     <transformer name="mytrans3" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
-       <double name="defaultValue">5</double>
-     </transformer>
-
-      If you are using the QueryElevationComponent, you may wish to mark documents that get boosted.  The
-      EditorialMarkerFactory will do exactly that:
-     <transformer name="qecBooster" class="org.apache.solr.response.transform.EditorialMarkerFactory" />
-    -->
-</config>
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+     For more details about configurations options that may appear in
+     this file, see https://solr.apache.org/guide/solr/latest/configuration-guide/configuring-solrconfig-xml.html.
+-->
+<config>
+  <!-- In all configuration below, a prefix of "solr." for class names
+       is an alias that causes solr to search appropriate packages,
+       including org.apache.solr.(search|update|request|core|analysis)
+
+       You may also specify a fully qualified Java classname if you
+       have your own custom plugins.
+    -->
+
+  <!-- Controls what version of Lucene various components of Solr
+       adhere to.  Generally, you want to use the latest version to
+       get all bug fixes and improvements. It is highly recommended
+       that you fully re-index after changing this setting as it can
+       affect both how text is indexed and queried.
+  -->
+  <luceneMatchVersion>9.7</luceneMatchVersion>
+
+  <!-- <lib/> directives can be used to instruct Solr to load any Jars
+       identified and use them to resolve any "plugins" specified in
+       your solrconfig.xml or schema.xml (ie: Analyzers, Request
+       Handlers, etc...).
+
+       All directories and paths are resolved relative to the
+       instanceDir.
+
+       Please note that <lib/> directives are processed in the order
+       that they appear in your solrconfig.xml file, and are "stacked"
+       on top of each other when building a ClassLoader - so if you have
+       plugin jars with dependencies on other jars, the "lower level"
+       dependency jars should be loaded first.
+
+       If a "./lib" directory exists in your instanceDir, all files
+       found in it are included as if you had used the following
+       syntax...
+
+              <lib dir="./lib" />
+    -->
+
+  <!-- A 'dir' option by itself adds any files found in the directory
+       to the classpath, this is useful for including all jars in a
+       directory.
+
+       When a 'regex' is specified in addition to a 'dir', only the
+       files in that directory which completely match the regex
+       (anchored on both ends) will be included.
+
+       If a 'dir' option (with or without a regex) is used and nothing
+       is found that matches, a warning will be logged.
+
+       The example below can be used to load a Solr Module along
+       with its external dependencies.
+    -->
+    <!-- <lib dir="${solr.install.dir:../../../..}/modules/ltr/lib" regex=".*\.jar" /> -->
+
+  <!-- an exact 'path' can be used instead of a 'dir' to specify a
+       specific jar file.  This will cause a serious error to be logged
+       if it can't be loaded.
+    -->
+  <!--
+     <lib path="../a-jar-that-does-not-exist.jar" />
+  -->
+
+  <!-- Data Directory
+
+       Used to specify an alternate directory to hold all index data
+       other than the default ./data under the Solr home.  If
+       replication is in use, this should match the replication
+       configuration.
+    -->
+  <dataDir>${solr.data.dir:}</dataDir>
+
+
+  <!-- The DirectoryFactory to use for indexes.
+
+       solr.StandardDirectoryFactory is filesystem
+       based and tries to pick the best implementation for the current
+       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
+       wraps solr.StandardDirectoryFactory and caches small files in memory
+       for better NRT performance.
+
+       One can force a particular implementation via solr.MMapDirectoryFactory
+       or solr.NIOFSDirectoryFactory.
+
+       solr.RAMDirectoryFactory is memory based and not persistent.
+    -->
+  <directoryFactory name="DirectoryFactory"
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
+
+  <!-- The CodecFactory for defining the format of the inverted index.
+       The default implementation is SchemaCodecFactory, which is the official Lucene
+       index format, but hooks into the schema to provide per-field customization of
+       the postings lists and per-document values in the fieldType element
+       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
+       are experimental, so if you choose to customize the index format, it's a good
+       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
+       before upgrading to a newer version to avoid unnecessary reindexing.
+       A "compressionMode" string element can be added to <codecFactory> to choose
+       between the existing compression modes in the default codec: "BEST_SPEED" (default)
+       or "BEST_COMPRESSION".
+  -->
+  <codecFactory class="solr.SchemaCodecFactory"/>
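+  <!-- Illustrative sketch only (not part of the shipped config): if BEST_COMPRESSION were
+       preferred over the default BEST_SPEED, the compressionMode element described above
+       could be added, along the lines of
+       <codecFactory class="solr.SchemaCodecFactory">
+         <str name="compressionMode">BEST_COMPRESSION</str>
+       </codecFactory>
+  -->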
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Index Config - These settings control low-level behavior of indexing
+       Most example settings here show the default value, but are commented
+       out, to more easily see where customizations have been made.
+
+       Note: This replaces <indexDefaults> and <mainIndex> from older versions
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <indexConfig>
+    <!-- maxFieldLength was removed in 4.0. To get similar behavior, include a
+         LimitTokenCountFilterFactory in your fieldType definition. E.g.
+     <filter class="solr.LimitTokenCountFilterFactory" maxTokenCount="10000"/>
+    -->
+    <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
+    <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
+
+    <!-- Expert: Enabling compound file will use less files for the index,
+         using fewer file descriptors at the expense of a performance decrease.
+         Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
+    <!-- <useCompoundFile>false</useCompoundFile> -->
+
+    <!-- ramBufferSizeMB sets the amount of RAM that may be used by Lucene
+         indexing for buffering added documents and deletions before they are
+         flushed to the Directory.
+         maxBufferedDocs sets a limit on the number of documents buffered
+         before flushing.
+         If both ramBufferSizeMB and maxBufferedDocs are set, then
+         Lucene will flush based on whichever limit is hit first.  -->
+    <!-- <ramBufferSizeMB>100</ramBufferSizeMB> -->
+    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->
+
+    <!-- Expert: ramPerThreadHardLimitMB sets the maximum amount of RAM that can be consumed
+         per thread before they are flushed. When limit is exceeded, this triggers a forced
+         flush even if ramBufferSizeMB has not been exceeded.
+         This is a safety limit to prevent Lucene's DocumentsWriterPerThread from exhausting its
+         address space due to its internal 32-bit signed integer based memory addressing.
+         The specified value should be greater than 0 and less than 2048MB. When not specified,
+         Solr uses Lucene's default value 1945. -->
+    <!-- <ramPerThreadHardLimitMB>1945</ramPerThreadHardLimitMB> -->
+
+    <!-- Expert: Merge Policy
+         The Merge Policy in Lucene controls how merging of segments is done.
+         The default since Solr/Lucene 3.3 is TieredMergePolicy.
+         The default since Lucene 2.3 was the LogByteSizeMergePolicy,
+         Even older versions of Lucene used LogDocMergePolicy.
+      -->
+    <!--
+        <mergePolicyFactory class="org.apache.solr.index.TieredMergePolicyFactory">
+          <int name="maxMergeAtOnce">10</int>
+          <int name="segmentsPerTier">10</int>
+          <double name="noCFSRatio">0.1</double>
+        </mergePolicyFactory>
+      -->
+
+    <!-- Expert: Merge Scheduler
+         The Merge Scheduler in Lucene controls how merges are
+         performed.  The ConcurrentMergeScheduler (Lucene 2.3 default)
+         can perform merges in the background using separate threads.
+         The SerialMergeScheduler (Lucene 2.2 default) does not.
+     -->
+    <!--
+       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
+       -->
+
+    <!-- LockFactory
+
+         This option specifies which Lucene LockFactory implementation
+         to use.
+
+         single = SingleInstanceLockFactory - suggested for a
+                  read-only index or when there is no possibility of
+                  another process trying to modify the index.
+         native = NativeFSLockFactory - uses OS native file locking.
+                  Do not use when multiple solr webapps in the same
+                  JVM are attempting to share a single index.
+         simple = SimpleFSLockFactory  - uses a plain file for locking
+
+         Defaults: 'native' is the default for Solr 3.6 and later; otherwise
+                   'simple' is the default
+
+         More details on the nuances of each LockFactory...
+         https://cwiki.apache.org/confluence/display/lucene/AvailableLockFactories
+    -->
+    <lockType>${solr.lock.type:native}</lockType>
+
+    <!-- Commit Deletion Policy
+         Custom deletion policies can be specified here. The class must
+         implement org.apache.lucene.index.IndexDeletionPolicy.
+
+         The default Solr IndexDeletionPolicy implementation supports
+         deleting index commit points on number of commits, age of
+         commit point and optimized status.
+
+         The latest commit point should always be preserved regardless
+         of the criteria.
+    -->
+    <!--
+    <deletionPolicy class="solr.SolrDeletionPolicy">
+    -->
+    <!-- The number of commit points to be kept -->
+    <!-- <str name="maxCommitsToKeep">1</str> -->
+    <!-- The number of optimized commit points to be kept -->
+    <!-- <str name="maxOptimizedCommitsToKeep">0</str> -->
+    <!--
+        Delete all commit points once they have reached the given age.
+        Supports DateMathParser syntax e.g.
+      -->
+    <!--
+       <str name="maxCommitAge">30MINUTES</str>
+       <str name="maxCommitAge">1DAY</str>
+    -->
+    <!--
+    </deletionPolicy>
+    -->
+
+    <!-- Lucene Infostream
+
+         To aid in advanced debugging, Lucene provides an "InfoStream"
+         of detailed information when indexing.
+
+         Setting the value to true will instruct the underlying Lucene
+         IndexWriter to write its debugging info to the specified file.
+      -->
+    <!-- <infoStream file="INFOSTREAM.txt">false</infoStream> -->
+  </indexConfig>
+
+  <!-- The default high-performance update handler -->
+  <updateHandler class="solr.DirectUpdateHandler2">
+
+    <!-- Enables a transaction log, used for real-time get, durability,
+         and SolrCloud replica recovery.  The log can grow as big as
+         uncommitted changes to the index, so use of a hard autoCommit
+         is recommended (see below).
+         "dir" - the target directory for transaction logs, defaults to the
+                solr data directory.
+         "numVersionBuckets" - sets the number of buckets used to keep
+                track of max version values when checking for re-ordered
+                updates; increase this value to reduce the cost of
+                synchronizing access to version buckets during high-volume
+                indexing; this requires 8 bytes (long) * numVersionBuckets
+                of heap space per Solr core.
+    -->
+    <updateLog>
+      <str name="dir">${solr.ulog.dir:}</str>
+      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
+    </updateLog>
+
+    <!-- AutoCommit
+
+         Perform a hard commit automatically under certain conditions.
+         Instead of enabling autoCommit, consider using "commitWithin"
+         when adding documents.
+
+         https://solr.apache.org/guide/solr/latest/indexing-guide/indexing-with-update-handlers.html
+
+         maxDocs - Maximum number of documents to add since the last
+                   commit before automatically triggering a new commit.
+
+         maxTime - Maximum amount of time in ms that is allowed to pass
+                   since a document was added before automatically
+                   triggering a new commit.
+         openSearcher - if false, the commit causes recent index changes
+           to be flushed to stable storage, but does not cause a new
+           searcher to be opened to make those changes visible.
+
+         If the updateLog is enabled, then it's highly recommended to
+         have some sort of hard autoCommit to limit the log size.
+      -->
+    <autoCommit>
+      <maxTime>${solr.autoCommit.maxTime:15000}</maxTime>
+      <openSearcher>false</openSearcher>
+    </autoCommit>
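+    <!-- Illustrative note (not part of the original file): instead of relying solely on
+         autoCommit, a client may request per-update visibility with the commitWithin
+         request parameter, e.g. POST to /solr/<collection>/update?commitWithin=10000,
+         where <collection> is a placeholder for the target collection or core name. -->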
+
+    <!-- softAutoCommit is like autoCommit except it causes a
+         'soft' commit which only ensures that changes are visible
+         but does not ensure that data is synced to disk.  This is
+         faster and more near-realtime friendly than a hard commit.
+      -->
+
+    <autoSoftCommit>
+      <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime>
+    </autoSoftCommit>
+
+    <!-- Update Related Event Listeners
+
+         Various IndexWriter related events can trigger Listeners to
+         take actions.
+
+         postCommit - fired after every commit or optimize command
+         postOptimize - fired after every optimize command
+      -->
+
+  </updateHandler>
+
+  <!-- IndexReaderFactory
+
+       Use the following format to specify a custom IndexReaderFactory,
+       which allows for alternate IndexReader implementations.
+
+       ** Experimental Feature **
+
+       Please note - Using a custom IndexReaderFactory may prevent
+       certain other features from working. The API to
+       IndexReaderFactory may change without warning or may even be
+       removed from future releases if the problems cannot be
+       resolved.
+
+
+       ** Features that may not work with custom IndexReaderFactory **
+
+       The ReplicationHandler assumes a disk-resident index. Using a
+       custom IndexReader implementation may cause incompatibility
+       with ReplicationHandler and may cause replication to not work
+       correctly. See SOLR-1366 for details.
+
+    -->
+  <!--
+  <indexReaderFactory name="IndexReaderFactory" class="package.class">
+    <str name="someArg">Some Value</str>
+  </indexReaderFactory >
+  -->
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Query section - these settings control query time things like caches
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <query>
+
+    <!-- Maximum number of clauses allowed when parsing a boolean query string.
+
+         This limit only impacts boolean queries specified by a user as part of a query string,
+         and provides per-collection controls on how complex user specified boolean queries can
+         be.  Query strings that specify more clauses than this will result in an error.
+
+         If this per-collection limit is greater than the global `maxBooleanClauses` limit
+         specified in `solr.xml`, it will have no effect, as that setting also limits the size
+         of user specified boolean queries.
+      -->
+    <maxBooleanClauses>${solr.max.booleanClauses:1024}</maxBooleanClauses>
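+    <!-- Illustrative note (not part of the original file): the ${solr.max.booleanClauses:1024}
+         syntax falls back to 1024 unless the property is supplied at startup, e.g. via
+         -Dsolr.max.booleanClauses=2048; the global limit in solr.xml still caps it. -->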
+
+    <!-- Solr Internal Query Caches
+         Starting with Solr 9.0 the default cache implementation used is CaffeineCache.
+    -->
+
+    <!-- Filter Cache
+
+         Cache used by SolrIndexSearcher for filters (DocSets),
+         unordered sets of *all* documents that match a query.  When a
+         new searcher is opened, its caches may be prepopulated or
+         "autowarmed" using data from caches in the old searcher.
+         autowarmCount is the number of items to prepopulate. For
+         CaffeineCache, the autowarmed items will be the most recently
+         accessed items.
+
+         Parameters:
+           class - the SolrCache implementation (CaffeineCache by default)
+           size - the maximum number of entries in the cache
+           initialSize - the initial capacity (number of entries) of
+               the cache.  (see java.util.HashMap)
+           autowarmCount - the number of entries to prepopulate from
+               an old cache.
+           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                      to occupy. Note that when this option is specified, the size
+                      and initialSize parameters are ignored.
+      -->
+    <filterCache size="512"
+                 initialSize="512"
+                 autowarmCount="0"/>
+
+    <!-- Query Result Cache
+
+         Caches results of searches - ordered lists of document ids
+         (DocList) based on a query, a sort, and the range of documents requested.
+         Additional supported parameter by CaffeineCache:
+            maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                       to occupy
+      -->
+    <queryResultCache size="512"
+                      initialSize="512"
+                      autowarmCount="0"/>
+
+    <!-- Document Cache
+
+         Caches Lucene Document objects (the stored fields for each
+         document).  Since Lucene internal document ids are transient,
+         this cache will not be autowarmed.
+      -->
+    <documentCache size="512"
+                   initialSize="512"
+                   autowarmCount="0"/>
+
+    <!-- custom cache currently used by block join -->
+    <cache name="perSegFilter"
+           class="solr.CaffeineCache"
+           size="10"
+           initialSize="0"
+           autowarmCount="10"
+           regenerator="solr.NoOpRegenerator" />
+
+    <!-- Field Value Cache
+
+         Cache used to hold field values that are quickly accessible
+         by document id.  The fieldValueCache is created by default
+         even if not configured here.
+      -->
+    <!--
+       <fieldValueCache size="512"
+                        autowarmCount="128"
+                        />
+      -->
+
+    <!-- Custom Cache
+
+         Example of a generic cache.  These caches may be accessed by
+         name through SolrIndexSearcher.getCache(),cacheLookup(), and
+         cacheInsert().  The purpose is to enable easy caching of
+         user/application level data.  The regenerator argument should
+         be specified as an implementation of solr.CacheRegenerator
+         if autowarming is desired.
+      -->
+    <!--
+       <cache name="myUserCache"
+              class="solr.CaffeineCache"
+              size="4096"
+              initialSize="1024"
+              autowarmCount="1024"
+              regenerator="com.mycompany.MyRegenerator"
+              />
+      -->
+
+
+    <!-- Lazy Field Loading
+
+         If true, stored fields that are not requested will be loaded
+         lazily.  This can result in a significant speed improvement
+         if the usual case is to not load all stored fields,
+         especially if the skipped fields are large compressed text
+         fields.
+    -->
+    <enableLazyFieldLoading>true</enableLazyFieldLoading>
+
+    <!-- Use Filter For Sorted Query
+
+         A possible optimization that attempts to use a filter to
+         satisfy a search.  If the requested sort does not include
+         score, then the filterCache will be checked for a filter
+         matching the query. If found, the filter will be used as the
+         source of document ids, and then the sort will be applied to
+         that.
+
+         For most situations, this will not be useful unless you
+         frequently get the same search repeatedly with different sort
+         options, and none of them ever use "score"
+      -->
+    <!--
+       <useFilterForSortedQuery>true</useFilterForSortedQuery>
+      -->
+
+    <!-- Result Window Size
+
+         An optimization for use with the queryResultCache.  When a search
+         is requested, a superset of the requested number of document ids
+         are collected.  For example, if a search for a particular query
+         requests matching documents 10 through 19, and queryWindowSize is 50,
+         then documents 0 through 49 will be collected and cached.  Any further
+         requests in that range can be satisfied via the cache.
+      -->
+    <queryResultWindowSize>20</queryResultWindowSize>
+
+    <!-- Maximum number of documents to cache for any entry in the
+         queryResultCache.
+      -->
+    <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
+
+    <!-- Query Related Event Listeners
+
+         Various IndexSearcher related events can trigger Listeners to
+         take actions.
+
+         newSearcher - fired whenever a new searcher is being prepared
+         and there is a current searcher handling requests (aka
+         registered).  It can be used to prime certain caches to
+         prevent long request times for certain requests.
+
+         firstSearcher - fired whenever a new searcher is being
+         prepared but there is no current registered searcher to handle
+         requests or to gain autowarming data from.
+
+
+      -->
+    <!-- QuerySenderListener takes an array of NamedList and executes a
+         local query request for each NamedList in sequence.
+      -->
+    <listener event="newSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <!--
+           <lst><str name="q">solr</str><str name="sort">price asc</str></lst>
+           <lst><str name="q">rocks</str><str name="sort">weight asc</str></lst>
+          -->
+      </arr>
+    </listener>
+    <listener event="firstSearcher" class="solr.QuerySenderListener">
+      <arr name="queries">
+        <!--
+        <lst>
+          <str name="q">static firstSearcher warming in solrconfig.xml</str>
+        </lst>
+        -->
+      </arr>
+    </listener>
+
+    <!-- Use Cold Searcher
+
+         If a search request comes in and there is no current
+         registered searcher, then immediately register the still
+         warming searcher and use it.  If "false" then all requests
+         will block until the first searcher is done warming.
+      -->
+    <useColdSearcher>false</useColdSearcher>
+
+  </query>
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+     Circuit Breaker Section - This section consists of configurations for
+     circuit breakers
+     ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <!-- Circuit breakers are designed to allow stability and predictable query
+     execution. They prevent operations that can take down the node and cause
+     noisy neighbour issues.
+
+     The CircuitBreakerManager is the default manager for all circuit breakers.
+     The enabled flag here controls the activation/deactivation of all circuit
+     breakers specified within.
+  -->
+  <circuitBreaker class="solr.CircuitBreakerManager" enabled="true">
+    <!-- Memory Circuit Breaker
+
+     Specific configuration for max JVM heap usage circuit breaker. This configuration defines
+     whether the circuit breaker is enabled and the threshold percentage of maximum heap allocated
+     beyond which queries will be rejected until the current JVM usage goes below the threshold.
+     Valid values range from 50 to 95.
+
+     Consider a scenario where the max heap allocated is 4 GB and memThreshold is defined as 75.
+     The threshold JVM usage will be 4 * 0.75 = 3 GB. It's generally a good idea to keep this value
+     between 75 and 80% of the maximum heap allocated.
+
+     If, at any point, the current JVM heap usage goes above 3 GB, queries will be rejected until
+     the heap usage goes below 3 GB again. If you see queries getting rejected with 503 error code,
+     check for "Circuit Breakers tripped" in logs and the corresponding error message should tell
+     you what transpired (if the failure was caused by tripped circuit breakers).
+    -->
+    
+    <!--
+    <str name="memEnabled">true</str>
+    <str name="memThreshold">75</str>
+    -->
+
+    <!-- CPU Circuit Breaker Configuration
+
+     Specific configuration for CPU utilization based circuit breaker. This configuration defines
+     whether the circuit breaker is enabled and the average load over the last minute at which the
+     circuit breaker should start rejecting queries.
+    -->
+    
+    <!--
+    <str name="cpuEnabled">true</str>
+    <str name="cpuThreshold">75</str>
+    -->
+
+  </circuitBreaker>
+
+  <!-- Request Dispatcher
+
+       This section contains instructions for how the SolrDispatchFilter
+       should behave when processing requests for this SolrCore.
+
+    -->
+  <requestDispatcher>
+    <!-- Request Parsing
+
+         These settings indicate how Solr Requests may be parsed, and
+         what restrictions may be placed on the ContentStreams from
+         those requests
+
+         multipartUploadLimitInKB - specifies the max size (in KiB) of
+         Multipart File Uploads that Solr will allow in a Request.
+
+         formdataUploadLimitInKB - specifies the max size (in KiB) of
+         form data (application/x-www-form-urlencoded) sent via
+         POST. You can use POST to pass request parameters not
+         fitting into the URL.
+
+         addHttpRequestToContext - if set to true, it will instruct
+         the requestParsers to include the original HttpServletRequest
+         object in the context map of the SolrQueryRequest under the
+         key "httpRequest". It will not be used by any of the existing
+         Solr components, but may be useful when developing custom
+         plugins.
+
+    <requestParsers multipartUploadLimitInKB="-1"
+                    formdataUploadLimitInKB="-1"
+                    addHttpRequestToContext="false"/>
+      -->
+
+    <!-- HTTP Caching
+
+         Set HTTP caching related parameters (for proxy caches and clients).
+
+         The options below instruct Solr not to output any HTTP Caching
+         related headers
+      -->
+    <httpCaching never304="true" />
+    <!-- If you include a <cacheControl> directive, it will be used to
+         generate a Cache-Control header (as well as an Expires header
+         if the value contains "max-age=")
+
+         By default, no Cache-Control header is generated.
+
+         You can use the <cacheControl> option even if you have set
+         never304="true"
+      -->
+    <!--
+       <httpCaching never304="true" >
+         <cacheControl>max-age=30, public</cacheControl>
+       </httpCaching>
+      -->
+    <!-- To enable Solr to respond with automatically generated HTTP
+         Caching headers, and to response to Cache Validation requests
+         correctly, set the value of never304="false"
+
+         This will cause Solr to generate Last-Modified and ETag
+         headers based on the properties of the Index.
+
+         The following options can also be specified to affect the
+         values of these headers...
+
+         lastModFrom - the default value is "openTime" which means the
+         Last-Modified value (and validation against If-Modified-Since
+         requests) will all be relative to when the current Searcher
+         was opened.  You can change it to lastModFrom="dirLastMod" if
+         you want the value to exactly correspond to when the physical
+         index was last modified.
+
+         etagSeed="..." is an option you can change to force the ETag
+         header (and validation against If-None-Match requests) to be
+         different even if the index has not changed (ie: when making
+         significant changes to your config file)
+
+         (lastModifiedFrom and etagSeed are both ignored if you use
+         the never304="true" option)
+      -->
+    <!--
+       <httpCaching lastModifiedFrom="openTime"
+                    etagSeed="Solr">
+         <cacheControl>max-age=30, public</cacheControl>
+       </httpCaching>
+      -->
+  </requestDispatcher>
+
+  <!-- Request Handlers
+
+       https://solr.apache.org/guide/solr/latest/configuration-guide/requesthandlers-searchcomponents.html
+
+       Incoming queries will be dispatched to a specific handler by name based on the path specified in the request.
+
+       All handlers (Search Handlers, Update Request Handlers, and other specialized types) can have default parameters (defaults, appends and invariants).
+
+       Search Handlers can also (append, prepend or even replace) default or defined Search Components.
+
+       Update Request Handlers can leverage Update Request Processors to pre-process documents after they are loaded
+       and before they are indexed/stored.
+
+       Not all Request Handlers are defined in solrconfig.xml; many are implicit.
+    -->
+
+  <!-- Primary search handler, expected by most clients, examples and UI frameworks -->
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <int name="rows">10</int>
+      <!--
+        This boosting configuration was
+        - first introduced in 2015, see https://github.com/IQSS/dataverse/issues/1928#issuecomment-91651853,
+        - re-introduced in 2018 for the Solr 7.2.1 update, see https://github.com/IQSS/dataverse/issues/4158,
+        - and finally evolved to its current state later in 2018, see https://github.com/IQSS/dataverse/issues/4938
+          (merged with https://github.com/IQSS/dataverse/commit/3843e5366845d55c327cdb252dd9b4e4125b9b88).
+
+        Since then, it has not been touched again (as of 2021-12-21).
+      -->
+      <str name="defType">edismax</str>
+      <float name="tie">0.075</float>
+      <str name="qf">
+          dvName^400
+          authorName^180
+          dvSubject^190
+          dvDescription^180
+          dvAffiliation^170
+          title^130
+          subject^120
+          keyword^110
+          topicClassValue^100
+          dsDescriptionValue^90
+          authorAffiliation^80
+          publicationCitation^60
+          producerName^50
+          fileName^30
+          fileDescription^30
+          variableLabel^20
+          variableName^10
+          _text_^1.0
+      </str>
+      <str name="pf">
+          dvName^200
+          authorName^100
+          dvSubject^100
+          dvDescription^100
+          dvAffiliation^100
+          title^75
+          subject^75
+          keyword^75
+          topicClassValue^75
+          dsDescriptionValue^75
+          authorAffiliation^75
+          publicationCitation^75
+          producerName^75
+      </str>
+      <!-- Even though this number is huge, it only seems to apply a boost of ~1.5x to the final result. -MAD 4.9.3 -->
+      <str name="bq">
+          isHarvested:false^25000
+      </str>
+    </lst>
+  </requestHandler>
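+  <!-- Illustrative example (not part of the original config): a request such as
+       /select?q=climate+data&rows=10 is served by the search handler above, so the
+       edismax qf/pf boosts and the isHarvested:false bq boost are applied automatically;
+       the query text here is only a placeholder. -->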
+
+  <!-- A request handler that returns indented JSON by default -->
+  <requestHandler name="/query" class="solr.SearchHandler">
+    <lst name="defaults">
+      <str name="echoParams">explicit</str>
+      <str name="wt">json</str>
+      <str name="indent">true</str>
+    </lst>
+  </requestHandler>
+
+  <!-- Shared parameters for multiple Request Handlers -->
+  <initParams path="/update/**,/query,/select,/spell">
+    <lst name="defaults">
+      <str name="df">_text_</str>
+    </lst>
+  </initParams>
+
+  <!-- Spell Check
+
+       The spell check component can return a list of alternative spelling
+       suggestions.
+
+       https://solr.apache.org/guide/solr/latest/query-guide/spell-checking.html
+    -->
+  <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
+
+    <str name="queryAnalyzerFieldType">text_general</str>
+
+    <!-- Multiple "Spell Checkers" can be declared and used by this
+         component
+      -->
+
+    <!-- a spellchecker built from a field of the main index -->
+    <lst name="spellchecker">
+      <str name="name">default</str>
+      <str name="field">_text_</str>
+      <str name="classname">solr.DirectSolrSpellChecker</str>
+      <!-- the spellcheck distance measure used, the default is the internal levenshtein -->
+      <str name="distanceMeasure">internal</str>
+      <!-- minimum accuracy needed to be considered a valid spellcheck suggestion -->
+      <float name="accuracy">0.5</float>
+      <!-- the maximum #edits we consider when enumerating terms: can be 1 or 2 -->
+      <int name="maxEdits">2</int>
+      <!-- the minimum shared prefix when enumerating terms -->
+      <int name="minPrefix">1</int>
+      <!-- maximum number of inspections per result. -->
+      <int name="maxInspections">5</int>
+      <!-- minimum length of a query term to be considered for correction -->
+      <int name="minQueryLength">4</int>
+      <!-- maximum threshold of documents a query term can appear to be considered for correction -->
+      <float name="maxQueryFrequency">0.01</float>
+      <!-- uncomment this to require suggestions to occur in 1% of the documents
+        <float name="thresholdTokenFrequency">.01</float>
+      -->
+    </lst>
+
+    <!-- a spellchecker that can break or combine words.  See "/spell" handler below for usage -->
+    <!--
+    <lst name="spellchecker">
+      <str name="name">wordbreak</str>
+      <str name="classname">solr.WordBreakSolrSpellChecker</str>
+      <str name="field">name</str>
+      <str name="combineWords">true</str>
+      <str name="breakWords">true</str>
+      <int name="maxChanges">10</int>
+    </lst>
+    -->
+  </searchComponent>
+
+  <!-- A request handler for demonstrating the spellcheck component.
+
+       NOTE: This is purely an example.  The whole purpose of the
+       SpellCheckComponent is to hook it into the request handler that
+       handles your normal user queries so that a separate request is
+       not needed to get suggestions.
+
+       IN OTHER WORDS, THERE IS A REALLY GOOD CHANCE THE SETUP BELOW IS
+       NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
+
+       See https://solr.apache.org/guide/solr/latest/query-guide/spell-checking.html for details
+       on the request parameters.
+    -->
+  <!--
+  <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
+    <lst name="defaults">
+    -->
+      <!-- Solr will use suggestions from both the 'default' spellchecker
+           and from the 'wordbreak' spellchecker and combine them.
+           collations (re-written queries) can include a combination of
+           corrections from both spellcheckers -->
+  <!--
+      <str name="spellcheck.dictionary">default</str>
+      <str name="spellcheck">on</str>
+      <str name="spellcheck.extendedResults">true</str>
+      <str name="spellcheck.count">10</str>
+      <str name="spellcheck.alternativeTermCount">5</str>
+      <str name="spellcheck.maxResultsForSuggest">5</str>
+      <str name="spellcheck.collate">true</str>
+      <str name="spellcheck.collateExtendedResults">true</str>
+      <str name="spellcheck.maxCollationTries">10</str>
+      <str name="spellcheck.maxCollations">5</str>
+    </lst>
+    <arr name="last-components">
+      <str>spellcheck</str>
+    </arr>
+  </requestHandler>
+  -->
+
+  <!-- Highlighting Component
+
+       https://solr.apache.org/guide/solr/latest/query-guide/highlighting.html
+    -->
+  <searchComponent class="solr.HighlightComponent" name="highlight">
+    <!-- note: the hl.method=unified highlighter is not configured here; it's completely configured
+    via parameters.  The below configuration supports hl.method=original and fastVector. -->
+    <highlighting>
+      <!-- Configure the standard fragmenter -->
+      <!-- This could most likely be commented out in the "default" case -->
+      <fragmenter name="gap"
+                  default="true"
+                  class="solr.highlight.GapFragmenter">
+        <lst name="defaults">
+          <int name="hl.fragsize">100</int>
+        </lst>
+      </fragmenter>
+
+      <!-- A regular-expression-based fragmenter
+           (for sentence extraction)
+        -->
+      <fragmenter name="regex"
+                  class="solr.highlight.RegexFragmenter">
+        <lst name="defaults">
+          <!-- slightly smaller fragsizes work better because of slop -->
+          <int name="hl.fragsize">70</int>
+          <!-- allow 50% slop on fragment sizes -->
+          <float name="hl.regex.slop">0.5</float>
+          <!-- a basic sentence pattern -->
+          <str name="hl.regex.pattern">[-\w ,/\n\&quot;&apos;]{20,200}</str>
+        </lst>
+      </fragmenter>
+
+      <!-- Configure the standard formatter -->
+      <formatter name="html"
+                 default="true"
+                 class="solr.highlight.HtmlFormatter">
+        <lst name="defaults">
+          <str name="hl.simple.pre"><![CDATA[<em>]]></str>
+          <str name="hl.simple.post"><![CDATA[</em>]]></str>
+        </lst>
+      </formatter>
+
+      <!-- Configure the standard encoder -->
+      <encoder name="html"
+               class="solr.highlight.HtmlEncoder" />
+
+      <!-- Configure the standard fragListBuilder -->
+      <fragListBuilder name="simple"
+                       class="solr.highlight.SimpleFragListBuilder"/>
+
+      <!-- Configure the single fragListBuilder -->
+      <fragListBuilder name="single"
+                       class="solr.highlight.SingleFragListBuilder"/>
+
+      <!-- Configure the weighted fragListBuilder -->
+      <fragListBuilder name="weighted"
+                       default="true"
+                       class="solr.highlight.WeightedFragListBuilder"/>
+
+      <!-- default tag FragmentsBuilder -->
+      <fragmentsBuilder name="default"
+                        default="true"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <!--
+        <lst name="defaults">
+          <str name="hl.multiValuedSeparatorChar">/</str>
+        </lst>
+        -->
+      </fragmentsBuilder>
+
+      <!-- multi-colored tag FragmentsBuilder -->
+      <fragmentsBuilder name="colored"
+                        class="solr.highlight.ScoreOrderFragmentsBuilder">
+        <lst name="defaults">
+          <str name="hl.tag.pre"><![CDATA[
+               <b style="background:yellow">,<b style="background:lawngreen">,
+               <b style="background:aquamarine">,<b style="background:magenta">,
+               <b style="background:palegreen">,<b style="background:coral">,
+               <b style="background:wheat">,<b style="background:khaki">,
+               <b style="background:lime">,<b style="background:deepskyblue">]]></str>
+          <str name="hl.tag.post"><![CDATA[</b>]]></str>
+        </lst>
+      </fragmentsBuilder>
+
+      <boundaryScanner name="default"
+                       default="true"
+                       class="solr.highlight.SimpleBoundaryScanner">
+        <lst name="defaults">
+          <str name="hl.bs.maxScan">10</str>
+          <str name="hl.bs.chars">.,!? &#9;&#10;&#13;</str>
+        </lst>
+      </boundaryScanner>
+
+      <boundaryScanner name="breakIterator"
+                       class="solr.highlight.BreakIteratorBoundaryScanner">
+        <lst name="defaults">
+          <!-- type should be one of CHARACTER, WORD(default), LINE and SENTENCE -->
+          <str name="hl.bs.type">WORD</str>
+          <!-- language and country are used when constructing Locale object.  -->
+          <!-- And the Locale object will be used when getting instance of BreakIterator -->
+          <str name="hl.bs.language">en</str>
+          <str name="hl.bs.country">US</str>
+        </lst>
+      </boundaryScanner>
+    </highlighting>
+  </searchComponent>
+
+  <!-- Update Request Processors
+       https://solr.apache.org/guide/solr/latest/configuration-guide/update-request-processors.html
+
+       Chains or individual Update Request Processor Factories can be declared and referenced
+       to preprocess documents sent to Update Request Handlers.
+    -->
+
+  <!-- Add unknown fields to the schema
+
+       Field type guessing update request processors that will
+       attempt to parse string-typed field values as Booleans, Longs,
+       Doubles, or Dates, and then add schema fields with the guessed
+       field types. Text content will be indexed as "text_general" as
+       well as a copy to a plain string version in *_str.
+       See the updateRequestProcessorChain defined later for the order they are executed in.
+
+       These require that the schema is both managed and mutable, by
+       declaring schemaFactory as ManagedIndexSchemaFactory, with
+       mutable specified as true.
+
+       See https://solr.apache.org/guide/solr/latest/indexing-guide/schemaless-mode.html for further explanation.
+
+    -->
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
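+  <!-- Sketch for comparison (not enabled here): schemaless mode as described above would
+       require a managed, mutable schema instead of the classic one, along the lines of
+       <schemaFactory class="ManagedIndexSchemaFactory">
+         <bool name="mutable">true</bool>
+         <str name="managedSchemaResourceName">managed-schema.xml</str>
+       </schemaFactory>
+  -->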
+  <updateProcessor class="solr.UUIDUpdateProcessorFactory" name="uuid"/>
+  <updateProcessor class="solr.RemoveBlankFieldUpdateProcessorFactory" name="remove-blank"/>
+  <updateProcessor class="solr.FieldNameMutatingUpdateProcessorFactory" name="field-name-mutating">
+    <str name="pattern">[^\w-\.]</str>
+    <str name="replacement">_</str>
+  </updateProcessor>
+  <updateProcessor class="solr.ParseBooleanFieldUpdateProcessorFactory" name="parse-boolean"/>
+  <updateProcessor class="solr.ParseLongFieldUpdateProcessorFactory" name="parse-long"/>
+  <updateProcessor class="solr.ParseDoubleFieldUpdateProcessorFactory" name="parse-double"/>
+  <updateProcessor class="solr.ParseDateFieldUpdateProcessorFactory" name="parse-date">
+    <arr name="format">
+      <str>yyyy-MM-dd['T'[HH:mm[:ss[.SSS]][z</str>
+      <str>yyyy-MM-dd['T'[HH:mm[:ss[,SSS]][z</str>
+      <str>yyyy-MM-dd HH:mm[:ss[.SSS]][z</str>
+      <str>yyyy-MM-dd HH:mm[:ss[,SSS]][z</str>
+      <str>[EEE, ]dd MMM yyyy HH:mm[:ss] z</str>
+      <str>EEEE, dd-MMM-yy HH:mm:ss z</str>
+      <str>EEE MMM ppd HH:mm:ss [z ]yyyy</str>
+    </arr>
+  </updateProcessor>
+  <updateProcessor class="solr.AddSchemaFieldsUpdateProcessorFactory" name="add-schema-fields">
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.String</str>
+      <str name="fieldType">text_general</str>
+      <lst name="copyField">
+        <str name="dest">*_str</str>
+        <int name="maxChars">256</int>
+      </lst>
+      <!-- Use as default mapping instead of defaultFieldType -->
+      <bool name="default">true</bool>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.Boolean</str>
+      <str name="fieldType">booleans</str>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.util.Date</str>
+      <str name="fieldType">pdates</str>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.Long</str>
+      <str name="valueClass">java.lang.Integer</str>
+      <str name="fieldType">plongs</str>
+    </lst>
+    <lst name="typeMapping">
+      <str name="valueClass">java.lang.Number</str>
+      <str name="fieldType">pdoubles</str>
+    </lst>
+  </updateProcessor>
+
+  <!-- The update.autoCreateFields property can be turned to false to disable schemaless mode -->
+  <updateRequestProcessorChain name="add-unknown-fields-to-the-schema" default="${update.autoCreateFields:false}"
+           processor="uuid,remove-blank,field-name-mutating,parse-boolean,parse-long,parse-double,parse-date,add-schema-fields">
+    <processor class="solr.LogUpdateProcessorFactory"/>
+    <processor class="solr.DistributedUpdateProcessorFactory"/>
+    <processor class="solr.RunUpdateProcessorFactory"/>
+  </updateRequestProcessorChain>
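+  <!-- Note (illustrative, not part of the original file): because the default above is
+       ${update.autoCreateFields:false}, schemaless field guessing stays off unless the
+       system property is flipped, e.g. by starting Solr with -Dupdate.autoCreateFields=true. -->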
+
+  <!-- Deduplication
+
+       An example dedup update request processor chain that creates the "id" field
+       on the fly based on the hash code of some other fields.  This
+       example has overwriteDupes set to false since we are using the
+       id field as the signatureField and Solr will maintain
+       uniqueness based on that anyway.
+
+    -->
+  <!--
+     <updateRequestProcessorChain name="dedupe">
+       <processor class="solr.processor.SignatureUpdateProcessorFactory">
+         <bool name="enabled">true</bool>
+         <str name="signatureField">id</str>
+         <str name="fields">name,features,cat</str>
+         <str name="signatureClass">solr.processor.Lookup3Signature</str>
+       </processor>
+       <processor class="solr.LogUpdateProcessorFactory" />
+       <processor class="solr.RunUpdateProcessorFactory" />
+     </updateRequestProcessorChain>
+    -->
+
+  <!-- Response Writers
+
+       https://solr.apache.org/guide/solr/latest/query-guide/response-writers.html
+
+       Request responses will be written using the writer specified by
+       the 'wt' request parameter matching the name of a registered
+       writer.
+
+       The "default" writer is the default and will be used if 'wt' is
+       not specified in the request.
+    -->
+  <!-- The following response writers are implicitly configured unless
+       overridden...
+    -->
+  <!--
+     <queryResponseWriter name="xml"
+                          default="true"
+                          class="solr.XMLResponseWriter" />
+     <queryResponseWriter name="json" class="solr.JSONResponseWriter"/>
+     <queryResponseWriter name="python" class="solr.PythonResponseWriter"/>
+     <queryResponseWriter name="ruby" class="solr.RubyResponseWriter"/>
+     <queryResponseWriter name="php" class="solr.PHPResponseWriter"/>
+     <queryResponseWriter name="phps" class="solr.PHPSerializedResponseWriter"/>
+     <queryResponseWriter name="csv" class="solr.CSVResponseWriter"/>
+     <queryResponseWriter name="schema.xml" class="solr.SchemaXmlResponseWriter"/>
+    -->
+
+  <!-- Overriding the content-type of the response writer.
+       For example, the default content-type of JSON is application/json. This can be overridden to
+       text/plain so that the response is easy to read in *any* browser.
+   -->
+  <!--
+     <queryResponseWriter name="json" class="solr.JSONResponseWriter">
+        <str name="content-type">text/plain; charset=UTF-8</str>
+      </queryResponseWriter>
+   -->
+
+  <!-- Query Parsers
+
+       https://solr.apache.org/guide/solr/latest/query-guide/query-syntax-and-parsers.html
+
+       Multiple QParserPlugins can be registered by name, and then
+       used in either the "defType" param for the QueryComponent (used
+       by SearchHandler) or in LocalParams
+    -->
+  <!-- example of registering a query parser -->
+  <!--
+     <queryParser name="myparser" class="com.mycompany.MyQParserPlugin"/>
+    -->
+
+  <!-- Function Parsers
+
+       https://solr.apache.org/guide/solr/latest/query-guide/function-queries.html
+
+       Multiple ValueSourceParsers can be registered by name, and then
+       used as function names when using the "func" QParser.
+    -->
+  <!-- example of registering a custom function parser  -->
+  <!--
+     <valueSourceParser name="myfunc"
+                        class="com.mycompany.MyValueSourceParser" />
+    -->
+
+
+  <!-- Document Transformers
+       https://solr.apache.org/guide/solr/latest/query-guide/document-transformers.html
+    -->
+  <!--
+     Could be something like:
+     <transformer name="db" class="com.mycompany.LoadFromDatabaseTransformer" >
+       <int name="connection">jdbc://....</int>
+     </transformer>
+
+     To add a constant value to all docs, use:
+     <transformer name="mytrans2" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <int name="value">5</int>
+     </transformer>
+
+     If you want the user to still be able to change it with _value:something_ use this:
+     <transformer name="mytrans3" class="org.apache.solr.response.transform.ValueAugmenterFactory" >
+       <double name="defaultValue">5</double>
+     </transformer>
+
+      If you are using the QueryElevationComponent, you may wish to mark documents that get boosted.  The
+      EditorialMarkerFactory will do exactly that:
+     <transformer name="qecBooster" class="org.apache.solr.response.transform.EditorialMarkerFactory" />
+    -->
+</config>
diff --git a/conf/solr/8.11.1/update-fields.sh b/conf/solr/9.3.0/update-fields.sh
similarity index 98%
rename from conf/solr/8.11.1/update-fields.sh
rename to conf/solr/9.3.0/update-fields.sh
index 49ea8151c77..386c1ee4e87 100755
--- a/conf/solr/8.11.1/update-fields.sh
+++ b/conf/solr/9.3.0/update-fields.sh
@@ -2,6 +2,8 @@
 
 set -euo pipefail
 
+# [INFO]: Update a prepared Solr schema.xml for Dataverse with a given list of metadata fields
+
 #### #### #### #### #### #### #### #### #### #### #### #### #### #### #### #### #### ####
 # This script will
 # 1. take a file (or read it from STDIN) with all <field> and <copyField> definitions
diff --git a/conf/vagrant/etc/shibboleth/attribute-map.xml b/conf/vagrant/etc/shibboleth/attribute-map.xml
deleted file mode 100644
index f6386b620f5..00000000000
--- a/conf/vagrant/etc/shibboleth/attribute-map.xml
+++ /dev/null
@@ -1,141 +0,0 @@
-<Attributes xmlns="urn:mace:shibboleth:2.0:attribute-map" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-
-    <!--
-    The mappings are a mix of SAML 1.1 and SAML 2.0 attribute names agreed to within the Shibboleth
-    community. The non-OID URNs are SAML 1.1 names and most of the OIDs are SAML 2.0 names, with a
-    few exceptions for newer attributes where the name is the same for both versions. You will
-    usually want to uncomment or map the names for both SAML versions as a unit.
-    -->
-    
-    <!-- First some useful eduPerson attributes that many sites might use. -->
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonPrincipalName" id="eppn">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.6" id="eppn">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder"/>
-    </Attribute>
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonScopedAffiliation" id="affiliation">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.9" id="affiliation">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonAffiliation" id="unscoped-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.1" id="unscoped-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonEntitlement" id="entitlement"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.7" id="entitlement"/>
-
-    <!-- A persistent id attribute that supports personalized anonymous access. -->
-    
-    <!-- First, the deprecated/incorrect version, decoded as a scoped string: -->
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonTargetedID" id="targeted-id">
-        <AttributeDecoder xsi:type="ScopedAttributeDecoder"/>
-        <!-- <AttributeDecoder xsi:type="NameIDFromScopedAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/> -->
-    </Attribute>
-    
-    <!-- Second, an alternate decoder that will decode the incorrect form into the newer form. -->
-    <!--
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonTargetedID" id="persistent-id">
-        <AttributeDecoder xsi:type="NameIDFromScopedAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/>
-    </Attribute>
-    -->
-    
-    <!-- Third, the new version (note the OID-style name): -->
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.10" id="persistent-id">
-        <AttributeDecoder xsi:type="NameIDAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/>
-    </Attribute>
-
-    <!-- Fourth, the SAML 2.0 NameID Format: -->
-    <Attribute name="urn:oasis:names:tc:SAML:2.0:nameid-format:persistent" id="persistent-id">
-        <AttributeDecoder xsi:type="NameIDAttributeDecoder" formatter="$NameQualifier!$SPNameQualifier!$Name" defaultQualifiers="true"/>
-    </Attribute>
-    
-    <!-- Some more eduPerson attributes, uncomment these to use them... -->
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonPrimaryAffiliation" id="primary-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonNickname" id="nickname"/>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonPrimaryOrgUnitDN" id="primary-orgunit-dn"/>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonOrgUnitDN" id="orgunit-dn"/>
-    <Attribute name="urn:mace:dir:attribute-def:eduPersonOrgDN" id="org-dn"/>
-
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.5" id="primary-affiliation">
-        <AttributeDecoder xsi:type="StringAttributeDecoder" caseSensitive="false"/>
-    </Attribute>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.2" id="nickname"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.8" id="primary-orgunit-dn"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.4" id="orgunit-dn"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.3" id="org-dn"/>
-
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.1.1.11" id="assurance"/>
-    
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.5.1.1" id="member"/>
-    
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.6.1.1" id="eduCourseOffering"/>
-    <Attribute name="urn:oid:1.3.6.1.4.1.5923.1.6.1.2" id="eduCourseMember"/>
-
-    <!-- Examples of LDAP-based attributes, uncomment to use these... -->
-    <Attribute name="urn:mace:dir:attribute-def:cn" id="cn"/>
-    <Attribute name="urn:mace:dir:attribute-def:sn" id="sn"/>
-    <Attribute name="urn:mace:dir:attribute-def:givenName" id="givenName"/>
-    <Attribute name="urn:mace:dir:attribute-def:displayName" id="displayName"/>
-    <Attribute name="urn:mace:dir:attribute-def:mail" id="mail"/>
-    <Attribute name="urn:mace:dir:attribute-def:telephoneNumber" id="telephoneNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:title" id="title"/>
-    <Attribute name="urn:mace:dir:attribute-def:initials" id="initials"/>
-    <Attribute name="urn:mace:dir:attribute-def:description" id="description"/>
-    <Attribute name="urn:mace:dir:attribute-def:carLicense" id="carLicense"/>
-    <Attribute name="urn:mace:dir:attribute-def:departmentNumber" id="departmentNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:employeeNumber" id="employeeNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:employeeType" id="employeeType"/>
-    <Attribute name="urn:mace:dir:attribute-def:preferredLanguage" id="preferredLanguage"/>
-    <Attribute name="urn:mace:dir:attribute-def:manager" id="manager"/>
-    <Attribute name="urn:mace:dir:attribute-def:seeAlso" id="seeAlso"/>
-    <Attribute name="urn:mace:dir:attribute-def:facsimileTelephoneNumber" id="facsimileTelephoneNumber"/>
-    <Attribute name="urn:mace:dir:attribute-def:street" id="street"/>
-    <Attribute name="urn:mace:dir:attribute-def:postOfficeBox" id="postOfficeBox"/>
-    <Attribute name="urn:mace:dir:attribute-def:postalCode" id="postalCode"/>
-    <Attribute name="urn:mace:dir:attribute-def:st" id="st"/>
-    <Attribute name="urn:mace:dir:attribute-def:l" id="l"/>
-    <Attribute name="urn:mace:dir:attribute-def:o" id="o"/>
-    <Attribute name="urn:mace:dir:attribute-def:ou" id="ou"/>
-    <Attribute name="urn:mace:dir:attribute-def:businessCategory" id="businessCategory"/>
-    <Attribute name="urn:mace:dir:attribute-def:physicalDeliveryOfficeName" id="physicalDeliveryOfficeName"/>
-
-    <Attribute name="urn:oid:0.9.2342.19200300.100.1.1" id="uid"/>
-    <Attribute name="urn:oid:2.5.4.3" id="cn"/>
-    <Attribute name="urn:oid:2.5.4.4" id="sn"/>
-    <Attribute name="urn:oid:2.5.4.42" id="givenName"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.241" id="displayName"/>
-    <Attribute name="urn:oid:0.9.2342.19200300.100.1.3" id="mail"/>
-    <Attribute name="urn:oid:2.5.4.20" id="telephoneNumber"/>
-    <Attribute name="urn:oid:2.5.4.12" id="title"/>
-    <Attribute name="urn:oid:2.5.4.43" id="initials"/>
-    <Attribute name="urn:oid:2.5.4.13" id="description"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.1" id="carLicense"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.2" id="departmentNumber"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.3" id="employeeNumber"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.4" id="employeeType"/>
-    <Attribute name="urn:oid:2.16.840.1.113730.3.1.39" id="preferredLanguage"/>
-    <Attribute name="urn:oid:0.9.2342.19200300.100.1.10" id="manager"/>
-    <Attribute name="urn:oid:2.5.4.34" id="seeAlso"/>
-    <Attribute name="urn:oid:2.5.4.23" id="facsimileTelephoneNumber"/>
-    <Attribute name="urn:oid:2.5.4.9" id="street"/>
-    <Attribute name="urn:oid:2.5.4.18" id="postOfficeBox"/>
-    <Attribute name="urn:oid:2.5.4.17" id="postalCode"/>
-    <Attribute name="urn:oid:2.5.4.8" id="st"/>
-    <Attribute name="urn:oid:2.5.4.7" id="l"/>
-    <Attribute name="urn:oid:2.5.4.10" id="o"/>
-    <Attribute name="urn:oid:2.5.4.11" id="ou"/>
-    <Attribute name="urn:oid:2.5.4.15" id="businessCategory"/>
-    <Attribute name="urn:oid:2.5.4.19" id="physicalDeliveryOfficeName"/>
-
-</Attributes>
diff --git a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml b/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml
deleted file mode 100644
index 67225b5e670..00000000000
--- a/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml
+++ /dev/null
@@ -1,298 +0,0 @@
-<EntitiesDescriptor Name="urn:mace:shibboleth:testshib:two"
-    xmlns="urn:oasis:names:tc:SAML:2.0:metadata" xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
-    xmlns:mdalg="urn:oasis:names:tc:SAML:metadata:algsupport" xmlns:mdui="urn:oasis:names:tc:SAML:metadata:ui"
-    xmlns:shibmd="urn:mace:shibboleth:metadata:1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-
-    <!-- This file contains the metadata for the testing IdP and SP
-     that are operated by TestShib as a service for testing new
-     Shibboleth and SAML providers. -->
-
-    <EntityDescriptor entityID="https://idp.testshib.org/idp/shibboleth">
-        
-        <Extensions>
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha512" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#sha384" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
-        </Extensions>
-
-        <IDPSSODescriptor
-            protocolSupportEnumeration="urn:oasis:names:tc:SAML:1.1:protocol urn:mace:shibboleth:1.0 urn:oasis:names:tc:SAML:2.0:protocol">
-            <Extensions>
-                <shibmd:Scope regexp="false">testshib.org</shibmd:Scope>
-                <mdui:UIInfo>
-                    <mdui:DisplayName xml:lang="en">TestShib Test IdP</mdui:DisplayName>
-                    <mdui:Description xml:lang="en">TestShib IdP. Use this as a source of attributes
-                        for your test SP.</mdui:Description>
-                    <mdui:Logo height="88" width="75"
-                        >https://www.testshib.org/images/testshib-transp.png</mdui:Logo>
-                </mdui:UIInfo>
-
-            </Extensions>
-            <KeyDescriptor>
-                <ds:KeyInfo>
-                    <ds:X509Data>
-                        <ds:X509Certificate>
-                            MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV
-                            MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD
-                            VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4
-                            MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI
-                            EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl
-                            c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B
-                            AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C
-                            yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe
-                            3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT
-                            NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614
-                            kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH
-                            gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G
-                            A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86
-                            9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl
-                            bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo
-                            aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN
-                            BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL
-                            I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo
-                            93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4
-                            /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj
-                            Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr
-                            8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA==
-                        </ds:X509Certificate>
-                    </ds:X509Data>
-                </ds:KeyInfo>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes256-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes192-cbc" />
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5"/>
-            </KeyDescriptor>
-
-            <ArtifactResolutionService Binding="urn:oasis:names:tc:SAML:1.0:bindings:SOAP-binding"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML1/SOAP/ArtifactResolution"
-                index="1"/>
-            <ArtifactResolutionService Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML2/SOAP/ArtifactResolution"
-                index="2"/>
-
-            <NameIDFormat>urn:mace:shibboleth:1.0:nameIdentifier</NameIDFormat>
-            <NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</NameIDFormat>
-            
-            <SingleSignOnService Binding="urn:mace:shibboleth:1.0:profiles:AuthnRequest"
-                Location="https://idp.testshib.org/idp/profile/Shibboleth/SSO"/>
-            <SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
-                Location="https://idp.testshib.org/idp/profile/SAML2/POST/SSO"/>
-            <SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
-                Location="https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO"/>
-            <SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP" 
-                Location="https://idp.testshib.org/idp/profile/SAML2/SOAP/ECP"/>
-
-        </IDPSSODescriptor>
-
-
-        <AttributeAuthorityDescriptor
-            protocolSupportEnumeration="urn:oasis:names:tc:SAML:1.1:protocol urn:oasis:names:tc:SAML:2.0:protocol">
-
-            <KeyDescriptor>
-                <ds:KeyInfo>
-                    <ds:X509Data>
-                        <ds:X509Certificate>
-                            MIIEDjCCAvagAwIBAgIBADANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJVUzEV
-                            MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMREwDwYD
-                            VQQKEwhUZXN0U2hpYjEZMBcGA1UEAxMQaWRwLnRlc3RzaGliLm9yZzAeFw0wNjA4
-                            MzAyMTEyMjVaFw0xNjA4MjcyMTEyMjVaMGcxCzAJBgNVBAYTAlVTMRUwEwYDVQQI
-                            EwxQZW5uc3lsdmFuaWExEzARBgNVBAcTClBpdHRzYnVyZ2gxETAPBgNVBAoTCFRl
-                            c3RTaGliMRkwFwYDVQQDExBpZHAudGVzdHNoaWIub3JnMIIBIjANBgkqhkiG9w0B
-                            AQEFAAOCAQ8AMIIBCgKCAQEArYkCGuTmJp9eAOSGHwRJo1SNatB5ZOKqDM9ysg7C
-                            yVTDClcpu93gSP10nH4gkCZOlnESNgttg0r+MqL8tfJC6ybddEFB3YBo8PZajKSe
-                            3OQ01Ow3yT4I+Wdg1tsTpSge9gEz7SrC07EkYmHuPtd71CHiUaCWDv+xVfUQX0aT
-                            NPFmDixzUjoYzbGDrtAyCqA8f9CN2txIfJnpHE6q6CmKcoLADS4UrNPlhHSzd614
-                            kR/JYiks0K4kbRqCQF0Dv0P5Di+rEfefC6glV8ysC8dB5/9nb0yh/ojRuJGmgMWH
-                            gWk6h0ihjihqiu4jACovUZ7vVOCgSE5Ipn7OIwqd93zp2wIDAQABo4HEMIHBMB0G
-                            A1UdDgQWBBSsBQ869nh83KqZr5jArr4/7b+QazCBkQYDVR0jBIGJMIGGgBSsBQ86
-                            9nh83KqZr5jArr4/7b+Qa6FrpGkwZzELMAkGA1UEBhMCVVMxFTATBgNVBAgTDFBl
-                            bm5zeWx2YW5pYTETMBEGA1UEBxMKUGl0dHNidXJnaDERMA8GA1UEChMIVGVzdFNo
-                            aWIxGTAXBgNVBAMTEGlkcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN
-                            BgkqhkiG9w0BAQUFAAOCAQEAjR29PhrCbk8qLN5MFfSVk98t3CT9jHZoYxd8QMRL
-                            I4j7iYQxXiGJTT1FXs1nd4Rha9un+LqTfeMMYqISdDDI6tv8iNpkOAvZZUosVkUo
-                            93pv1T0RPz35hcHHYq2yee59HJOco2bFlcsH8JBXRSRrJ3Q7Eut+z9uo80JdGNJ4
-                            /SJy5UorZ8KazGj16lfJhOBXldgrhppQBb0Nq6HKHguqmwRfJ+WkxemZXzhediAj
-                            Geka8nz8JjwxpUjAiSWYKLtJhGEaTqCYxCCX2Dw+dOTqUzHOZ7WKv4JXPK5G/Uhr
-                            8K/qhmFT2nIQi538n6rVYLeWj8Bbnl+ev0peYzxFyF5sQA==
-                        </ds:X509Certificate>
-                    </ds:X509Data>
-                </ds:KeyInfo>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes256-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes192-cbc" />
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5"/>
-            </KeyDescriptor>
-
-
-            <AttributeService Binding="urn:oasis:names:tc:SAML:1.0:bindings:SOAP-binding"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML1/SOAP/AttributeQuery"/>
-            <AttributeService Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"
-                Location="https://idp.testshib.org:8443/idp/profile/SAML2/SOAP/AttributeQuery"/>
-
-            <NameIDFormat>urn:mace:shibboleth:1.0:nameIdentifier</NameIDFormat>
-            <NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</NameIDFormat>
-
-        </AttributeAuthorityDescriptor>
-
-        <Organization>
-            <OrganizationName xml:lang="en">TestShib Two Identity Provider</OrganizationName>
-            <OrganizationDisplayName xml:lang="en">TestShib Two</OrganizationDisplayName>
-            <OrganizationURL xml:lang="en">http://www.testshib.org/testshib-two/</OrganizationURL>
-        </Organization>
-        <ContactPerson contactType="technical">
-            <GivenName>Nate</GivenName>
-            <SurName>Klingenstein</SurName>
-            <EmailAddress>ndk@internet2.edu</EmailAddress>
-        </ContactPerson>
-    </EntityDescriptor>
-
-    <EntityDescriptor entityID="https://sp.testshib.org/shibboleth-sp">
-
-        <Extensions>
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha512" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#sha384" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2001/04/xmlenc#sha256" />
-            <mdalg:DigestMethod Algorithm="http://www.w3.org/2000/09/xmldsig#sha1" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha512" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha384" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" />
-            <mdalg:SigningMethod Algorithm="http://www.w3.org/2000/09/xmldsig#rsa-sha1" />
-        </Extensions>
-        
-        <!-- An SP supporting SAML 1 and 2 contains this element with protocol support as shown. -->
-        <SPSSODescriptor
-            protocolSupportEnumeration="urn:oasis:names:tc:SAML:2.0:protocol urn:oasis:names:tc:SAML:1.1:protocol http://schemas.xmlsoap.org/ws/2003/07/secext">
-
-            <Extensions>
-                <!-- Extension to permit the SP to receive IdP discovery responses. -->
-                <idpdisc:DiscoveryResponse
-                    xmlns:idpdisc="urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol"
-                    index="1" Binding="urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol"
-                    Location="https://sp.testshib.org/Shibboleth.sso/DS"/>
-                
-                <mdui:UIInfo>
-                    <mdui:DisplayName xml:lang="en">TestShib Test SP</mdui:DisplayName>
-                    <mdui:Description xml:lang="en">TestShib SP. Log into this to test your machine.
-                        Once logged in check that all attributes that you expected have been
-                        released.</mdui:Description>
-                    <mdui:Logo height="88" width="75">https://www.testshib.org/images/testshib-transp.png</mdui:Logo>
-                </mdui:UIInfo>
-            </Extensions>
-
-            <KeyDescriptor>
-                <ds:KeyInfo>
-                    <ds:X509Data>
-                        <ds:X509Certificate>
-                            MIIEPjCCAyagAwIBAgIBADANBgkqhkiG9w0BAQUFADB3MQswCQYDVQQGEwJVUzEV
-                            MBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1cmdoMSIwIAYD
-                            VQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQDEw9zcC50ZXN0
-                            c2hpYi5vcmcwHhcNMDYwODMwMjEyNDM5WhcNMTYwODI3MjEyNDM5WjB3MQswCQYD
-                            VQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYDVQQHEwpQaXR0c2J1
-                            cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3ZpZGVyMRgwFgYDVQQD
-                            Ew9zcC50ZXN0c2hpYi5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
-                            AQDJyR6ZP6MXkQ9z6RRziT0AuCabDd3x1m7nLO9ZRPbr0v1LsU+nnC363jO8nGEq
-                            sqkgiZ/bSsO5lvjEt4ehff57ERio2Qk9cYw8XCgmYccVXKH9M+QVO1MQwErNobWb
-                            AjiVkuhWcwLWQwTDBowfKXI87SA7KR7sFUymNx5z1aoRvk3GM++tiPY6u4shy8c7
-                            vpWbVfisfTfvef/y+galxjPUQYHmegu7vCbjYP3On0V7/Ivzr+r2aPhp8egxt00Q
-                            XpilNai12LBYV3Nv/lMsUzBeB7+CdXRVjZOHGuQ8mGqEbsj8MBXvcxIKbcpeK5Zi
-                            JCVXPfarzuriM1G5y5QkKW+LAgMBAAGjgdQwgdEwHQYDVR0OBBYEFKB6wPDxwYrY
-                            StNjU5P4b4AjBVQVMIGhBgNVHSMEgZkwgZaAFKB6wPDxwYrYStNjU5P4b4AjBVQV
-                            oXukeTB3MQswCQYDVQQGEwJVUzEVMBMGA1UECBMMUGVubnN5bHZhbmlhMRMwEQYD
-                            VQQHEwpQaXR0c2J1cmdoMSIwIAYDVQQKExlUZXN0U2hpYiBTZXJ2aWNlIFByb3Zp
-                            ZGVyMRgwFgYDVQQDEw9zcC50ZXN0c2hpYi5vcmeCAQAwDAYDVR0TBAUwAwEB/zAN
-                            BgkqhkiG9w0BAQUFAAOCAQEAc06Kgt7ZP6g2TIZgMbFxg6vKwvDL0+2dzF11Onpl
-                            5sbtkPaNIcj24lQ4vajCrrGKdzHXo9m54BzrdRJ7xDYtw0dbu37l1IZVmiZr12eE
-                            Iay/5YMU+aWP1z70h867ZQ7/7Y4HW345rdiS6EW663oH732wSYNt9kr7/0Uer3KD
-                            9CuPuOidBacospDaFyfsaJruE99Kd6Eu/w5KLAGG+m0iqENCziDGzVA47TngKz2v
-                            PVA+aokoOyoz3b53qeti77ijatSEoKjxheBWpO+eoJeGq/e49Um3M2ogIX/JAlMa
-                            Inh+vYSYngQB2sx9LGkR9KHaMKNIGCDehk93Xla4pWJx1w== 
-                        </ds:X509Certificate>
-                    </ds:X509Data>
-                </ds:KeyInfo>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes256-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes192-cbc" />
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#aes128-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#tripledes-cbc"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"/>
-                <EncryptionMethod Algorithm="http://www.w3.org/2001/04/xmlenc#rsa-1_5"/>
-            </KeyDescriptor>
-
-            <!-- This tells IdPs that Single Logout is supported and where/how to request it. -->
-
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/SOAP"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"/>
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/Redirect"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"/>
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/POST"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"/>
-            <SingleLogoutService Location="https://sp.testshib.org/Shibboleth.sso/SLO/Artifact"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact"/>
-
-
-            <!-- This tells IdPs that you only need transient identifiers. -->
-            <NameIDFormat>urn:oasis:names:tc:SAML:2.0:nameid-format:transient</NameIDFormat>
-            <NameIDFormat>urn:mace:shibboleth:1.0:nameIdentifier</NameIDFormat>
-
-            <!--
-		This tells IdPs where and how to send authentication assertions. Mostly
-		the SP will tell the IdP what location to use in its request, but this
-		is how the IdP validates the location and also figures out which
-		SAML version/binding to use.
-		-->
-
-            <AssertionConsumerService index="1" isDefault="true"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML2/POST"/>
-            <AssertionConsumerService index="2"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST-SimpleSign"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML2/POST-SimpleSign"/>
-            <AssertionConsumerService index="3"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML2/Artifact"/>
-            <AssertionConsumerService index="4"
-                Binding="urn:oasis:names:tc:SAML:1.0:profiles:browser-post"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML/POST"/>
-            <AssertionConsumerService index="5"
-                Binding="urn:oasis:names:tc:SAML:1.0:profiles:artifact-01"
-                Location="https://sp.testshib.org/Shibboleth.sso/SAML/Artifact"/>
-            <AssertionConsumerService index="6"
-                Binding="http://schemas.xmlsoap.org/ws/2003/07/secext"
-                Location="https://sp.testshib.org/Shibboleth.sso/ADFS"/>
-
-            <!-- A couple additional assertion consumers for the registration webapp. -->
-
-            <AssertionConsumerService index="7"
-                Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
-                Location="https://www.testshib.org/Shibboleth.sso/SAML2/POST"/>
-            <AssertionConsumerService index="8"
-                Binding="urn:oasis:names:tc:SAML:1.0:profiles:browser-post"
-                Location="https://www.testshib.org/Shibboleth.sso/SAML/POST"/>
-
-        </SPSSODescriptor>
-
-        <!-- This is just information about the entity in human terms. -->
-        <Organization>
-            <OrganizationName xml:lang="en">TestShib Two Service Provider</OrganizationName>
-            <OrganizationDisplayName xml:lang="en">TestShib Two</OrganizationDisplayName>
-            <OrganizationURL xml:lang="en">http://www.testshib.org/testshib-two/</OrganizationURL>
-        </Organization>
-        <ContactPerson contactType="technical">
-            <GivenName>Nate</GivenName>
-            <SurName>Klingenstein</SurName>
-            <EmailAddress>ndk@internet2.edu</EmailAddress>
-        </ContactPerson>
-
-    </EntityDescriptor>
-
-
-</EntitiesDescriptor>
-
diff --git a/conf/vagrant/etc/shibboleth/shibboleth2.xml b/conf/vagrant/etc/shibboleth/shibboleth2.xml
deleted file mode 100644
index 946e73bdf6a..00000000000
--- a/conf/vagrant/etc/shibboleth/shibboleth2.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<!--
-This is an example shibboleth2.xml generated for you by TestShib.  It's reduced and recommended
-specifically for testing.  You don't need to change anything, but you may want to explore the file
-to learn about how your SP works.  Uncomment attributes in your attribute-map.xml file to test them.
-
-If you want to test advanced functionality, start from the distribution shibboleth2.xml and add the
-MetadataProvider, the right entityID, and a properly configured SSO element.  More information:
-
-https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPConfiguration
--->
-
-<SPConfig xmlns="urn:mace:shibboleth:2.0:native:sp:config" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
-    clockSkew="1800">
-
-    <!-- The entityID is the name TestShib made for your SP. -->
-    <ApplicationDefaults entityID="https://pdurbin.pagekite.me/shibboleth"
-        REMOTE_USER="eppn" attributePrefix="AJP_">
-
-        <!-- You should use secure cookies if at all possible.  See cookieProps in this Wiki article. -->
-        <!-- https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPSessions -->
-        <Sessions lifetime="28800" timeout="3600" checkAddress="false" relayState="ss:mem" handlerSSL="false">
-
-            <!-- Triggers a login request directly to the TestShib IdP. -->
-            <!-- https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPServiceSSO -->
-            <!-- A single explicit entityId could be used for Dataverse installation that only cares about one IdP. -->
-            <!-- <SSO entityID="https://idp.testshib.org/idp/shibboleth">SAML2 SAML1</SSO> -->
-
-            <!-- not used but maybe helpful for troubleshooting -->
-            <!--<SSO discoveryProtocol="SAMLDS" discoveryURL="https://pdurbin.pagekite.me/shibboleth-ds/index.html">SAML2 SAML1</SSO>-->
-            <!-- picker is displayed as an iframe in this page -->
-            <SSO discoveryProtocol="SAMLDS" discoveryURL="http://pdurbin.pagekite.me/loginpage.xhtml">SAML2 SAML1</SSO>
-<!--
--->
-            <!-- SAML and local-only logout. -->
-            <!-- https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPServiceLogout -->
-            <Logout>SAML2 Local</Logout>
-
-            <!--
-                Handlers allow you to interact with the SP and gather more information.  Try them out!
-                Attribute values received by the SP through SAML will be visible at:
-                http://pdurbin.pagekite.me/Shibboleth.sso/Session
-            -->
-
-            <!-- Extension service that generates "approximate" metadata based on SP configuration. -->
-            <Handler type="MetadataGenerator" Location="/Metadata" signing="false"/>
-
-            <!-- Status reporting service. -->
-            <Handler type="Status" Location="/Status" acl="127.0.0.1"/>
-
-            <!-- Session diagnostic service. -->
-            <Handler type="Session" Location="/Session" showAttributeValues="true"/>
-
-            <!-- JSON feed of discovery information. -->
-            <Handler type="DiscoveryFeed" Location="/DiscoFeed"/>
-
-        </Sessions>
-
-        <!-- Error pages to display to yourself if something goes horribly wrong. -->
-        <Errors supportContact="root@localhost" logoLocation="/shibboleth-sp/logo.jpg" 
-                styleSheet="/shibboleth-sp/main.css"/>
-
-        <!-- Loads and trusts a metadata file that describes only the Testshib IdP and how to communicate with it. -->
-        <!-- For Dataverse we hard code a local file instead of using a URI -->
-        <!-- <MetadataProvider type="XML" uri="http://www.testshib.org/metadata/testshib-providers.xml" backingFilePath="testshib-two-idp-metadata.xml" reloadInterval="180000" /> -->
-
-        <!-- IdPs we want allow go in /etc/shibboleth/dataverse-idp-metadata.xml -->
-        <MetadataProvider type="XML" file="dataverse-idp-metadata.xml" backingFilePath="local-idp-metadata.xml" legacyOrgNames="true" reloadInterval="7200"/>
-
-        <!-- Attribute and trust options you shouldn't need to change. -->
-        <AttributeExtractor type="XML" validate="true" path="attribute-map.xml"/>
-        <AttributeResolver type="Query" subjectMatch="true"/>
-        <AttributeFilter type="XML" validate="true" path="attribute-policy.xml"/>
-
-        <!-- Your SP generated these credentials.  They're used to talk to IdP's. -->
-        <CredentialResolver type="File" key="sp-key.pem" certificate="sp-cert.pem"/>
-
-    </ApplicationDefaults>
-    
-    <!-- Security policies you shouldn't change unless you know what you're doing. -->
-    <SecurityPolicyProvider type="XML" validate="true" path="security-policy.xml"/>
-
-    <!-- Low-level configuration about protocols and bindings available for use. -->
-    <ProtocolProvider type="XML" validate="true" reloadChanges="false" path="protocols.xml"/>
-
-</SPConfig>
diff --git a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo b/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo
deleted file mode 100644
index 1e0f8200040..00000000000
--- a/conf/vagrant/etc/yum.repos.d/epel-apache-maven.repo
+++ /dev/null
@@ -1,15 +0,0 @@
-# Place this file in your /etc/yum.repos.d/ directory
-
-[epel-apache-maven]
-name=maven from apache foundation.
-baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/$basearch/
-enabled=1
-skip_if_unavailable=1
-gpgcheck=0
-
-[epel-apache-maven-source]
-name=maven from apache foundation. - Source
-baseurl=http://repos.fedorapeople.org/repos/dchen/apache-maven/epel-$releasever/SRPMS
-enabled=0
-skip_if_unavailable=1
-gpgcheck=0
diff --git a/conf/vagrant/etc/yum.repos.d/shibboleth.repo b/conf/vagrant/etc/yum.repos.d/shibboleth.repo
deleted file mode 100644
index adf42185d8a..00000000000
--- a/conf/vagrant/etc/yum.repos.d/shibboleth.repo
+++ /dev/null
@@ -1,9 +0,0 @@
-[shibboleth]
-name=Shibboleth (rockylinux8)
-# Please report any problems to https://shibboleth.atlassian.net/jira
-type=rpm-md
-mirrorlist=https://shibboleth.net/cgi-bin/mirrorlist.cgi/rockylinux8
-gpgcheck=1
-gpgkey=https://shibboleth.net/downloads/service-provider/RPMS/repomd.xml.key
-        https://shibboleth.net/downloads/service-provider/RPMS/cantor.repomd.xml.key
-enabled=1
diff --git a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf b/conf/vagrant/var/lib/pgsql/data/pg_hba.conf
deleted file mode 100644
index e3244686066..00000000000
--- a/conf/vagrant/var/lib/pgsql/data/pg_hba.conf
+++ /dev/null
@@ -1,74 +0,0 @@
-# PostgreSQL Client Authentication Configuration File
-# ===================================================
-#
-# Refer to the "Client Authentication" section in the
-# PostgreSQL documentation for a complete description
-# of this file.  A short synopsis follows.
-#
-# This file controls: which hosts are allowed to connect, how clients
-# are authenticated, which PostgreSQL user names they can use, which
-# databases they can access.  Records take one of these forms:
-#
-# local      DATABASE  USER  METHOD  [OPTIONS]
-# host       DATABASE  USER  CIDR-ADDRESS  METHOD  [OPTIONS]
-# hostssl    DATABASE  USER  CIDR-ADDRESS  METHOD  [OPTIONS]
-# hostnossl  DATABASE  USER  CIDR-ADDRESS  METHOD  [OPTIONS]
-#
-# (The uppercase items must be replaced by actual values.)
-#
-# The first field is the connection type: "local" is a Unix-domain socket,
-# "host" is either a plain or SSL-encrypted TCP/IP socket, "hostssl" is an
-# SSL-encrypted TCP/IP socket, and "hostnossl" is a plain TCP/IP socket.
-#
-# DATABASE can be "all", "sameuser", "samerole", a database name, or
-# a comma-separated list thereof.
-#
-# USER can be "all", a user name, a group name prefixed with "+", or
-# a comma-separated list thereof.  In both the DATABASE and USER fields
-# you can also write a file name prefixed with "@" to include names from
-# a separate file.
-#
-# CIDR-ADDRESS specifies the set of hosts the record matches.
-# It is made up of an IP address and a CIDR mask that is an integer
-# (between 0 and 32 (IPv4) or 128 (IPv6) inclusive) that specifies
-# the number of significant bits in the mask.  Alternatively, you can write
-# an IP address and netmask in separate columns to specify the set of hosts.
-#
-# METHOD can be "trust", "reject", "md5", "password", "gss", "sspi", "krb5",
-# "ident", "pam", "ldap" or "cert".  Note that "password" sends passwords
-# in clear text; "md5" is preferred since it sends encrypted passwords.
-#
-# OPTIONS are a set of options for the authentication in the format
-# NAME=VALUE. The available options depend on the different authentication
-# methods - refer to the "Client Authentication" section in the documentation
-# for a list of which options are available for which authentication methods.
-#
-# Database and user names containing spaces, commas, quotes and other special
-# characters must be quoted. Quoting one of the keywords "all", "sameuser" or
-# "samerole" makes the name lose its special character, and just match a
-# database or username with that name.
-#
-# This file is read on server startup and when the postmaster receives
-# a SIGHUP signal.  If you edit the file on a running system, you have
-# to SIGHUP the postmaster for the changes to take effect.  You can use
-# "pg_ctl reload" to do that.
-
-# Put your actual configuration here
-# ----------------------------------
-#
-# If you want to allow non-local connections, you need to add more
-# "host" records. In that case you will also need to make PostgreSQL listen
-# on a non-local interface via the listen_addresses configuration parameter,
-# or via the -i or -h command line switches.
-#
-
-
-
-# TYPE  DATABASE    USER        CIDR-ADDRESS          METHOD
-
-# "local" is for Unix domain socket connections only
-local   all         all                               trust
-# IPv4 local connections:
-host    all         all         127.0.0.1/32          trust
-# IPv6 local connections:
-host    all         all         ::1/128               trust
diff --git a/conf/vagrant/var/www/dataverse/error-documents/503.html b/conf/vagrant/var/www/dataverse/error-documents/503.html
deleted file mode 100644
index 95a7dea4107..00000000000
--- a/conf/vagrant/var/www/dataverse/error-documents/503.html
+++ /dev/null
@@ -1 +0,0 @@
-<p>Custom "site is unavailable" 503 page.</p>
diff --git a/doc/release-notes/1249-collapse_dataverse_description.md b/doc/release-notes/1249-collapse_dataverse_description.md
deleted file mode 100644
index 8fe933005de..00000000000
--- a/doc/release-notes/1249-collapse_dataverse_description.md
+++ /dev/null
@@ -1 +0,0 @@
-Long descriptions for collections are now truncated but can be expanded to read the full description.
diff --git a/doc/release-notes/5.13-release-notes.md b/doc/release-notes/5.13-release-notes.md
index 0463b7d18a3..5e1741aec7e 100644
--- a/doc/release-notes/5.13-release-notes.md
+++ b/doc/release-notes/5.13-release-notes.md
@@ -80,7 +80,7 @@ See [Metadata Blocks](https://guides.dataverse.org/en/5.13/api/native-api.html#m
 
 ### Advanced Database Settings
 
-You can now enable advanced database connection pool configurations useful for debugging and monitoring as well as other settings. Of particular interest may be `sslmode=require`. See the new [Database Persistence](https://guides.dataverse.org/en/5.13/installation/config.html#database-persistence) section of the Installation Guide for details. (PR #8915)
+You can now enable advanced database connection pool configurations useful for debugging and monitoring as well as other settings. Of particular interest may be `sslmode=require`, though installations already setting this parameter in the Postgres connection string will need to move it to `dataverse.db.parameters`. See the new [Database Persistence](https://guides.dataverse.org/en/5.13/installation/config.html#database-persistence) section of the Installation Guide for details. (PR #8915)
 
 ### Support for Cleaning up Leftover Files in Dataset Storage
 
diff --git a/doc/release-notes/5.14-release-notes.md b/doc/release-notes/5.14-release-notes.md
new file mode 100644
index 00000000000..ef2a3b59659
--- /dev/null
+++ b/doc/release-notes/5.14-release-notes.md
@@ -0,0 +1,404 @@
+# Dataverse Software 5.14
+
+(If this note appears truncated on the GitHub Releases page, you can view it in full in the source tree: https://github.com/IQSS/dataverse/blob/master/doc/release-notes/5.14-release-notes.md)
+
+This release brings new features, enhancements, and bug fixes to the Dataverse software. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+Please note that, as an experiment, the sections of this release note are organized in a different order. The Upgrade and Installation sections are at the top, with the detailed sections highlighting new features and fixes further down. 
+
+## Installation
+
+If this is a new installation, please see our [Installation Guide](https://guides.dataverse.org/en/5.14/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/5.14/installation/intro.html#getting-help) if you need it!
+
+After your installation has gone into production, you are welcome to add it to our [map of installations](https://dataverse.org/installations) by opening an issue in the [dataverse-installations](https://github.com/IQSS/dataverse-installations) repo.
+
+## Upgrade Instructions
+
+0\. These instructions assume that you are upgrading from 5.13. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 5.14. 
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+In the following commands we assume that Payara 5 is installed in `/usr/local/payara5`. If not, adjust as needed.
+
+`export PAYARA=/usr/local/payara5`
+
+(or `setenv PAYARA /usr/local/payara5` if you are using a `csh`-like shell)
+
+1\. Undeploy the previous version.
+
+- `$PAYARA/bin/asadmin undeploy dataverse-5.13`
+
+2\. Stop Payara and remove the generated directory
+
+- `service payara stop`
+- `rm -rf $PAYARA/glassfish/domains/domain1/generated`
+
+3\. Start Payara
+
+- `service payara start`
+
+4\. Deploy this version.
+
+- `$PAYARA/bin/asadmin deploy dataverse-5.14.war`
+
+5\. Restart Payara
+
+- `service payara stop`
+- `service payara start`
+
+6\. Update the Citation metadata block (the update makes the field Series repeatable):
+
+- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv`
+- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"`
+
+If you are running an English-only installation, you are finished with the citation block. Otherwise, download the updated citation.properties file and place it in the [`dataverse.lang.directory`](https://guides.dataverse.org/en/5.14/installation/config.html#configuring-the-lang-directory); `/home/dataverse/langBundles` is used in the example below.
+
+- `wget https://github.com/IQSS/dataverse/releases/download/v5.14/citation.properties`
+- `cp citation.properties /home/dataverse/langBundles`
+
+7\. Update Solr schema.xml to allow multiple series to be used. See specific instructions below for those installations without custom metadata blocks (7a) and those with custom metadata blocks (7b).
+
+7a\. For installations without custom or experimental metadata blocks:
+
+- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script))
+
+- Replace schema.xml
+
+  - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf`
+
+- Start Solr instance (usually `service solr start`, depending on Solr/OS)
+
+7b\. For installations with custom or experimental metadata blocks:
+
+- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script))
+
+- There are two ways to regenerate the schema. You can either collect the output of the Dataverse schema API and feed it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed):
+```
+	wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/8.11.1/update-fields.sh
+	chmod +x update-fields.sh
+	curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-8.11.1/server/solr/collection1/conf/schema.xml
+```
+Alternatively, you can edit the following lines in your schema.xml by hand to indicate that series and its components are now `multiValued="true"`:
+```
+     <field name="series" type="string" stored="true" indexed="true" multiValued="true"/>
+     <field name="seriesInformation" type="text_en" multiValued="true" stored="true" indexed="true"/>
+     <field name="seriesName" type="text_en" multiValued="true" stored="true" indexed="true"/>
+```
+     
+- Restart Solr instance (usually `service solr restart`, depending on Solr/OS)
+
+8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api).
+
+9\. If your installation did not have :FilePIDsEnabled set, you will need to set it to true to keep file PIDs enabled:
+
+      curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled
+
+10\. If your installation uses Handles as persistent identifiers (instead of DOIs), remember to upgrade your Handles service installation to a currently supported version.
+
+Generally, Handles is known to work reliably even when running older versions that haven't been officially supported in years. We still recommend checking on your service and upgrading to a supported version (the latest version as of this writing is 9.3.1: https://www.handle.net/hnr-source/handle-9.3.1-distribution.tar.gz). An older version may seem to be running just fine, but keep in mind that it may stop working unexpectedly at any moment because of some incompatibility introduced in a Java rpm upgrade, or anything similarly unpredictable.
+
+Handles is also very good about backward compatibility. Meaning, in most cases you can simply stop the old version, unpack the new version from the distribution, and start it on the existing config and database files, and it'll just keep working. However, it is a good idea to keep up with the recommended format upgrades, for the sake of efficiency and to avoid any unexpected surprises, should they finally decide to drop the old database format, for example. The two specific things we recommend: 1) Make sure your service is using a json version of the `siteinfo` bundle (i.e., if you are still using `siteinfo.bin`, convert it to `siteinfo.json` and remove the binary file from the service directory) and 2) Make sure you are using the newer bdbje database format for your handles catalog (i.e., if you still have the files `handles.jdb` and `nas.jdb` in your server directory, convert them to the new format). Follow the simple conversion instructions in the file README.txt in the Handles software distribution. Make sure to stop the service before converting the files and make sure to have a full backup of the existing server directory, just in case. Do not hesitate to contact the Handles support team with any questions you may have, as they are very responsive and helpful.
+
+## New JVM Options and MicroProfile Config Options
+
+The following PID provider options are now available. See the section "Changes to PID Provider JVM Settings" below for more information. 
+
+- `dataverse.pid.datacite.mds-api-url`
+- `dataverse.pid.datacite.rest-api-url`
+- `dataverse.pid.datacite.username`
+- `dataverse.pid.datacite.password`
+- `dataverse.pid.handlenet.key.path`
+- `dataverse.pid.handlenet.key.passphrase`
+- `dataverse.pid.handlenet.index`
+- `dataverse.pid.permalink.base-url`
+- `dataverse.pid.ezid.api-url`
+- `dataverse.pid.ezid.username`
+- `dataverse.pid.ezid.password`
+
+The following MicroProfile Config options have been added as part of [Signposting](https://signposting.org/) support. See the section "Signposting for Dataverse" below for details. 
+
+- `dataverse.signposting.level1-author-limit`
+- `dataverse.signposting.level1-item-limit`
+
+The following JVM options are described in the "Creating datasets with incomplete metadata through API" section below. 
+
+- `dataverse.api.allow-incomplete-metadata`
+- `dataverse.ui.show-validity-filter`
+- `dataverse.ui.allow-review-for-incomplete`
+
+The following JVM/MicroProfile setting is for External Exporters. See "Mechanism Added for Adding External Exporters" below.
+
+- `dataverse.spi.export.directory`
+
+The following JVM/MicroProfile settings are for handling of support emails. See "Contact Email Improvements" below.
+
+- `dataverse.mail.support-email`
+- `dataverse.mail.cc-support-on-contact-emails` 
+
+The following JVM/MicroProfile setting is for extracting a geospatial bounding box even if S3 direct upload is enabled.
+
+- `dataverse.netcdf.geo-extract-s3-direct-upload`
+
+## Backward Incompatibilities
+
+The following list of potential backward incompatibilities references the sections of the "Detailed Release Highlights..." portion of the document further below where the corresponding changes are explained in detail.
+
+### Using the new External Exporters framework
+
+Care should be taken when replacing Dataverse's internal metadata export formats as third party code, including other third party Exporters, may depend on the contents of those export formats. When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command for the metadata exports of existing datasets to be updated.
+
+See "Mechanism Added for Adding External Exporters". 
+
+### Publishing via API 
+
+When publishing a dataset via API, it now mirrors the UI behavior by requiring that the dataset has either a standard license configured or valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without one **will fail with an error message**.
+
+See "Handling of license information fixed in the API" for guidance on how to ensure that datasets created or updated via native API have a license configured.
+
+
+
+## Detailed Release Highlights, New Features and Use Case Scenarios
+
+### For Dataverse developers, support for running Dataverse in Docker (experimental)
+
+Developers can experiment with running Dataverse in Docker: (PR #9439)
+
+This is an image developers build locally (or can pull from Docker Hub). It is not meant for production use!
+
+To provide a complete container-based local development environment, developers can deploy a Dataverse container from 
+the new image in addition to other containers for necessary dependencies: 
+https://guides.dataverse.org/en/5.14/container/dev-usage.html
+
+Please note that with this emerging solution we will sunset older tooling like `docker-aio` and `docker-dcm`.
+We envision more testing possibilities in the future, to be discussed as part of the
+[Dataverse Containerization Working Group](https://ct.gdcc.io). There is no sunsetting roadmap yet, but you have been warned.
+If there is some specific feature of these tools you would like to see kept, please [reach out](https://ct.gdcc.io).
+
+### Indexing performance improved
+
+Noticeable improvements in performance, especially for large datasets containing thousands of files.
+Uploading files one by one to the dataset is much faster now, allowing uploading thousands of files in an acceptable timeframe. Not only uploading a file, but all edit operations on datasets containing many files, got faster.
+Performance tweaks include indexing of the datasets in the background and optimizations in the amount of the indexing operations needed. Furthermore, updates to the dateset no longer wait for ingesting to finish. Ingesting was already running in the background, but it took a lock, preventing updating the dataset and degrading performance for datasets containing many files. (PR #9558)
+
+### For installations using MDC (Make Data Count), it is now possible to display both the MDC metrics and the legacy access counts, generated before MDC was enabled.
+
+This is enabled via the new setting `:MDCStartDate` that specifies the cutoff date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to any MDC numbers recorded since then. (PR #6543)
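+
+For example, a minimal sketch of enabling this via the admin settings API, assuming the setting takes a `YYYY-MM-DD` cutoff date (the date below is only a placeholder):
+
+```
+curl -X PUT -d '2019-10-01' http://localhost:8080/api/admin/settings/:MDCStartDate
+```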
+
+### Changes to PID Provider JVM Settings
+
+In preparation for a future feature to use multiple PID providers at the same time, all JVM settings for PID providers
+have been enabled to be configured using MicroProfile Config. In the same go, they were renamed to match the name
+of the provider to be configured.
+
+Please watch your log files for deprecation warnings. Your old settings will be picked up, but you should migrate
+to the new names to avoid unnecessary log clutter and get prepared for more future changes. An example message
+looks like this:
+
+```
+[#|2023-03-31T16:55:27.992+0000|WARNING|Payara 5.2022.5|edu.harvard.iq.dataverse.settings.source.AliasConfigSource|_ThreadID=30;_ThreadName=RunLevelControllerThread-1680281704925;_TimeMillis=1680281727992;_LevelValue=900;|
+   Detected deprecated config option doi.username in use. Please update your config to use dataverse.pid.datacite.username.|#]
+```
+
+Here is a list of the new settings:
+
+- dataverse.pid.datacite.mds-api-url
+- dataverse.pid.datacite.rest-api-url
+- dataverse.pid.datacite.username
+- dataverse.pid.datacite.password
+- dataverse.pid.handlenet.key.path
+- dataverse.pid.handlenet.key.passphrase
+- dataverse.pid.handlenet.index
+- dataverse.pid.permalink.base-url
+- dataverse.pid.ezid.api-url
+- dataverse.pid.ezid.username
+- dataverse.pid.ezid.password
+
+See also https://guides.dataverse.org/en/5.14/installation/config.html#persistent-identifiers-and-publishing-datasets (multiple PRs: #8823 #8828)
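+
+As a rough sketch of migrating one of these settings, assuming you previously set the deprecated `doi.username` JVM option and using the `$PAYARA` variable from the upgrade steps above (the value is a placeholder):
+
+```
+# remove the old, deprecated option (must match the existing value exactly)
+$PAYARA/bin/asadmin delete-jvm-options "-Ddoi.username=YOUR_DATACITE_USERNAME"
+# add the equivalent provider-scoped option
+$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.pid.datacite.username=YOUR_DATACITE_USERNAME"
+```
+
+Because these are MicroProfile Config options, they can typically also be supplied through other config sources (such as environment variables) instead of JVM options; see the linked guide for details.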
+
+### Signposting for Dataverse
+
+This release adds [Signposting](https://signposting.org) support to Dataverse to improve machine discoverability of datasets and files. (PR #8424)
+
+The following MicroProfile Config options are now available (these can be treated as JVM options):
+
+- dataverse.signposting.level1-author-limit
+- dataverse.signposting.level1-item-limit
+
+Signposting is described in more detail in a new page in the Admin Guide on discoverability: https://guides.dataverse.org/en/5.14/admin/discoverability.html
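+
+As a quick way to see Signposting in action, you can inspect the HTTP headers of a published dataset's landing page and look for the `Link` header (the persistent identifier below is a placeholder; the exact relations returned depend on the dataset and the limit settings above):
+
+```
+curl -I "http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/EXAMPLE"
+```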
+
+### Permalinks support
+
+Dataverse now optionally supports PermaLinks, a type of persistent identifier that does not involve a global registry service. PermaLinks are appropriate for Intranet deployment and catalog use cases. (PR #8674)
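+
+As a hedged sketch only: the base URL used for PermaLinks can be configured via the `dataverse.pid.permalink.base-url` option listed above, for example as a JVM option (the URL is a placeholder; the remaining PID-provider settings described in the Installation Guide are also required):
+
+```
+$PAYARA/bin/asadmin create-jvm-options "-Ddataverse.pid.permalink.base-url=https\://dataverse.example.edu"
+```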
+
+
+### Creating datasets with incomplete metadata through API
+
+It is now possible to create a dataset with some nominally mandatory metadata fields left unpopulated. For details on the use case that led to this feature, see issue #8822 and PR #8940.
+
+The create dataset API call (POST to /api/dataverses/#dataverseId/datasets) is extended with the "doNotValidate" parameter. However, in order to be able to create a dataset with incomplete metadata, the Solr configuration must be updated first with the new "schema.xml" file (do not forget to run the metadata fields update script when you use custom metadata). Reindexing is optional, but recommended. Also, even when this feature is not used, it is recommended to update the Solr configuration and reindex the metadata. Finally, this new feature can be activated with the "dataverse.api.allow-incomplete-metadata" JVM option.
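+
+A minimal sketch of such a call, assuming `doNotValidate` is passed as a query parameter and using placeholder values for the collection alias, API token, and JSON file:
+
+```
+curl -H "X-Dataverse-key:$API_TOKEN" -H 'Content-type: application/json' -X POST \
+  "http://localhost:8080/api/dataverses/root/datasets?doNotValidate=true" \
+  --upload-file dataset-incomplete.json
+```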
+
+You can also enable a valid/incomplete metadata filter in the "My Data" page using the "dataverse.ui.show-validity-filter" JVM option. By default, this filter is not shown. When you wish to use this filter, you must reindex the datasets first, otherwise datasets with valid metadata will not be shown in the results.
+
+It is not possible to publish datasets with incomplete or invalid metadata. By default, you also cannot send such datasets for review. If you wish to enable sending datasets with incomplete metadata for review, turn on the "dataverse.ui.allow-review-for-incomplete" JVM option.
+
+In order to customize the wording and add translations to the UI sections extended by this feature, you can edit the "Bundle.properties" file and the localized versions of that file. The property keys used by this feature are:
+- incomplete
+- valid
+- dataset.message.incomplete.warning
+- mydataFragment.validity
+- dataverses.api.create.dataset.error.mustIncludeAuthorName
+
+### Registering PIDs (DOIs or Handles) for files in select collections
+
+It is now possible to configure registering PIDs for files in individual collections.
+
+For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See the [:FilePIDsEnabled](https://guides.dataverse.org/en/5.14/installation/config.html#filepidsenabled) section of the Configuration guide for details. (PR #9614)
+
+### Mechanism Added for Adding External Exporters
+
+It is now possible for third parties to develop and share code to provide new metadata export formats for Dataverse. Export formats can be made available via the Dataverse UI and API or configured for use in Harvesting. Dataverse now provides developers with a separate dataverse-spi JAR file that contains the Java interfaces and classes required to create a new metadata Exporter. Once a new Exporter has been created and packaged as a JAR file, administrators can use it by specifying a local directory for third party Exporters, dropping the Exporter JAR there, and restarting Payara. This mechanism also allows new Exporters to replace any of Dataverse's existing metadata export formats. (PR #9175). See also https://guides.dataverse.org/en/5.14/developers/metadataexport.html
+
+#### Backward Incompatibilities
+
+Care should be taken when replacing Dataverse's internal metadata export formats, as third party code, including other third party Exporters, may depend on the contents of those export formats. When replacing an existing format, one must also remember to delete the cached metadata export files or run the reExport command so that the metadata exports of existing datasets are updated.
+
+#### New JVM/MicroProfile Settings
+
+dataverse.spi.export.directory - specifies a directory, readable by the Dataverse server. Any Exporter JAR files placed in this directory will be read by Dataverse and used to add/replace the specified metadata format.
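+
+For example, the option could be set via `asadmin` (the directory shown is only an example; any directory readable by the Payara process will do), followed by a Payara restart:
+
+```
+./asadmin create-jvm-options '-Ddataverse.spi.export.directory=/usr/local/dataverse/exporters'
+```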
+
+### Contact Email Improvements
+
+Email sent from the contact forms to the contact(s) for a collection, dataset, or datafile can now optionally be cc'd to a support email address. The support email address can be changed from the default :SystemEmail address to a separate :SupportEmail address. When multiple contacts are listed, the system will now send one email to all contacts (with the optional cc if configured) instead of separate emails to each contact. Contact names with a comma that refer to Organizations will no longer have the name parts reversed in the email greeting. A new protected/admin feedback API has been added. (PR #9186) See https://guides.dataverse.org/en/5.14/api/native-api.html#send-feedback-to-contact-s
+
+#### New JVM/MicroProfile Settings
+
+- dataverse.mail.support-email - allows a separate email address, distinct from the :SystemEmail, to be used as the To address in emails sent from the contact form / feedback API.
+- dataverse.mail.cc-support-on-contact-emails - when set, the support email address is included as a CC: entry when contact/feedback emails are sent to the contacts for a collection, dataset, or datafile.
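+
+As a sketch, both options could be set as JVM options (the address shown is only an example):
+
+```
+./asadmin create-jvm-options '-Ddataverse.mail.support-email=support@example.edu'
+./asadmin create-jvm-options '-Ddataverse.mail.cc-support-on-contact-emails=true'
+```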
+
+### Support for Grouping Dataset Files by Folder and Category Tag
+
+Dataverse now supports grouping dataset files by folder and/or optionally by Tag/Category. The default for whether to order by folder can be changed via :OrderByFolder. Ordering by category must be enabled by an administrator via the :CategoryOrder parameter, which is used to specify which tags appear first (e.g., to put Documentation files before Data or Code files). These group-by options work with the existing sort options, i.e., sorting alphabetically means that files within each folder or tag group will be sorted alphabetically. :AllowUserManagementOfOrder can be set to true to allow users to turn folder ordering and category ordering (if enabled) on or off in the current dataset view. (PR #9204)
+
+#### New Settings
+
+- :CategoryOrder - a comma-separated list of Category/Tag names defining the order in which files with those tags should be displayed. The setting can include custom tag names along with the pre-defined defaults (Documentation, Data, and Code, which can be overridden by the :FileCategories setting).
+- :OrderByFolder - defaults to true - whether to group files in the same folder together
+- :AllowUserManagementOfOrder - defaults to false - whether to allow users to toggle ordering on/off in the dataset display
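+
+As with other database settings, these can be set via the admin settings API, for example (the values shown are only illustrative):
+
+```
+curl -X PUT -d 'Documentation,Data,Code' http://localhost:8080/api/admin/settings/:CategoryOrder
+curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:AllowUserManagementOfOrder
+```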
+
+### Metadata field Series now repeatable
+
+This enhancement allows depositors to define multiple instances of the metadata field Series in the Citation Metadata block.
+
+Data contained in a dataset may belong to multiple series. Making the field repeatable makes it possible to reflect this fact in the dataset metadata. (PR #9256)
+
+### Guides in PDF Format
+
+An experimental version of the guides in PDF format is available at <http://preview.guides.gdcc.io/_/downloads/en/develop/pdf/> (PR #9474)
+
+Advice for anyone who wants to help improve the PDF is available at https://guides.dataverse.org/en/5.14/developers/documentation.html#pdf-version-of-the-guides
+
+### Datasets API extended
+
+The following APIs have been added: (PR #9592)
+
+- `/api/datasets/summaryFieldNames`
+- `/api/datasets/privateUrlDatasetVersion/{privateUrlToken}`
+- `/api/datasets/privateUrlDatasetVersion/{privateUrlToken}/citation`
+- `/api/datasets/{datasetId}/versions/{version}/citation`
+
+### Extra fields included in the JSON metadata
+
+The following fields are now available in the native JSON output:
+
+- `alternativePersistentId`
+- `publicationDate`
+- `citationDate`
+
+(PR #9657)
+
+
+### Files downloaded from Binder are now in their original format
+
+For example, data.dta (a Stata file) will be downloaded instead of data.tab (the archival version Dataverse creates as part of a successful ingest). (PR #9483)
+
+This should make it easier to write code to reproduce results, as the dataset authors and subsequent researchers are likely operating on the original file format rather than the format that Dataverse creates.
+
+For details, see #9374, <https://github.com/jupyterhub/repo2docker/issues/1242>, and <https://github.com/jupyterhub/repo2docker/pull/1253>.
+
+### Handling of license information fixed in the API
+
+(PR #9568)
+
+Publishing a dataset via API now requires the dataset to either have a standard license configured or have valid Custom Terms of Use (if allowed by the instance). Attempting to publish a dataset without either **will fail with an error message**. This introduces a backward incompatibility: if you have scripts that automatically create, update, and publish datasets, this last step may start failing. Unfortunately, there were some problems with the datasets APIs that made it difficult to manage licenses, so an API user was likely to end up with a dataset missing both of the above. In this release we have addressed this with the following fixes:
+
+We fixed the incompatibility between the format in which license information was *exported* in JSON and the format the create and update APIs expected for *import* (https://github.com/IQSS/dataverse/issues/9155). This means that the following JSON format can now be imported:
+```
+"license": {
+   "name": "CC0 1.0",
+   "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+}
+```
+However, for the sake of backward compatibility the old format
+```
+"license" : "CC0 1.0"
+```
+will be accepted as well.
+
+We have added the default license (CC0) to the model JSON file that we provide and recommend using as a starting point in the Native API Guide (https://github.com/IQSS/dataverse/issues/9364).
+
+We have also corrected the misleading language in the same guide, where we used to recommend that users select, edit, and re-import only the `.metadataBlocks` fragment of the JSON metadata representing the latest version. There are in fact other useful pieces of information that need to be preserved in the update (such as the `"license"` section above). The recommended way of creating base JSON for updates via the API is therefore to select *everything but* the `"files"` section, with (for example) the following `jq` command:
+
+```
+jq '.data | del(.files)'
+```
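+
+Putting the pieces together, an illustrative workflow (the persistent identifier, server URL, and file name are placeholders) might look like:
+
+```
+export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+export SERVER_URL=https://demo.dataverse.org
+export PID=doi:10.5072/FK2/EXAMPLE
+
+# fetch the latest version and keep everything except the files section
+curl -H "X-Dataverse-key:$API_TOKEN" \
+  "$SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PID" \
+  | jq '.data | del(.files)' > dataset-update-metadata.json
+
+# edit dataset-update-metadata.json as needed, then send it back
+curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
+  "$SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PID" \
+  --upload-file dataset-update-metadata.json
+```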
+
+Please see the [Update Metadata For a Dataset](https://guides.dataverse.org/en/5.14/api/native-api.html#update-metadata-for-a-dataset) section of our Native API Guide for more information.
+
+
+### New External Tool Type and Implementation 
+
+With this release a new experimental external tool type has been added to the Dataverse Software. The tool type is "query" and its first implementation is an experimental tool named [Ask the Data](https://github.com/IQSS/askdataverse) which allows users to ask natural language queries of tabular files in Dataverse. More information is available in the External Tools section of the guides. (PR #9737) See https://guides.dataverse.org/en/5.14/admin/external-tools.html#file-level-query-tools
+
+### Default Value for File PIDs registration has changed
+
+The default for whether PIDs are registered for files or not is now false.
+
+Installations that relied on the previous default (file PIDs enabled) will have to add the :FilePIDsEnabled = true setting to maintain the existing functionality.
+
+See Step 9 of the upgrade instructions:
+
+  If your installation did not have :FilePIDsEnabled set, you will need to set it to true to keep file PIDs enabled:
+
+  curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled
+
+
+It is now possible to allow File PIDs to be enabled/disabled per collection. See the [:AllowEnablingFilePIDsPerCollection](https://guides.dataverse.org/en/latest/installation/config.html#allowenablingfilepidspercollection) section of the Configuration guide for details.
+
+For example, registration of PIDs for files can now be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. 
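+
+For example, to allow per-collection control of file PIDs (assuming the admin API is reachable locally):
+
+`curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:AllowEnablingFilePIDsPerCollection`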
+
+
+### Changes and fixes in this release not already mentioned above include:
+
+- An endpoint for deleting a file has been added to the native API: https://guides.dataverse.org/en/5.14/api/native-api.html#deleting-files (PR #9383)
+- A date column has been added to the restricted file access request overview, indicating when the earliest request by that user was made. An issue was fixed where the request list was not updated when a request was approved or rejected. (PR #9257)
+- Changes made in v5.13 and v5.14 in multiple PRs to improve the embedded Schema.org metadata in dataset pages will only be propagated to the Schema.Org JSON-LD metadata export if a reExportAll() is done. (PR #9102)
+- It is now possible to write external vocabulary scripts that target a single child field in a metadata block. Example scripts are now available at https://github.com/gdcc/dataverse-external-vocab-support that can be configured to support lookup from the Research Organization Registry (ROR) for the Author Affiliation field and for the CrossRef Funding Registry (Fundreg) in the Funding Information/Agency field, both in the standard Citation metadata block. Application of these scripts to other fields, and the development of other scripts targeting child fields, are now possible. (PR #9402)
+- Dataverse now supports requiring a secret key to add or edit metadata in specified "system" metadata blocks. Changing the metadata in such system metadata blocks is not allowed without the key and is currently only allowed via API. (PR #9388)
+- An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled. (#9541) See https://guides.dataverse.org/en/5.14/user/dataset-management.html#geospatial-bounding-box
+- A file previewer called H5Web is now available for exploring and visualizing NetCDF and HDF5 files. (PR #9600) See https://guides.dataverse.org/en/5.14/user/dataset-management.html#h5web-previewer
+- Two file previewers for GeoTIFF and Shapefiles are now available for visualizing geotiff image files and zipped Shapefiles on a map. See https://github.com/gdcc/dataverse-previewers
+- A new alternative for setting up the Dataverse dependencies for the development environment through Docker Compose has been added. (PR #9417)
+- A new alternative, explained in the documentation, for building the Sphinx guides through a Docker container has been added. (PR #9417)
+- A container has been added called "configbaker" that configures Dataverse while running in containers. This allows developers to spin up Dataverse with a single command. (PR #9574)
+- Direct upload via the Dataverse UI will now support any algorithm configured via the `:FileFixityChecksumAlgorithm` setting. External apps using the direct upload API can now query Dataverse to discover which algorithm should be used. Sites that have been using an algorithm other than MD5 with direct upload and/or dvwebloader may want to use the `/api/admin/updateHashValues` call (see https://guides.dataverse.org/en/5.14/installation/config.html?highlight=updatehashvalues#filefixitychecksumalgorithm) to replace any MD5 hashes on existing files. (PR #9482)
+- The OAI_ORE metadata export (and hence the archival Bag for a dataset) now includes information about file embargoes. (PR #9698)
+- The DatasetFieldType attribute "displayFormat" is now returned by the API. (PR #9668)
+- An API named "MyData" has been available for years but is newly documented. It is used to get a list of the objects (datasets, collections or datafiles) that an authenticated user can modify. (PR #9596)
+- A Go client library for Dataverse APIs is now available. See https://guides.dataverse.org/en/5.14/api/client-libraries.html
+- A feature flag called "api-session-auth" has been added temporarily to aid in the development of the new frontend (#9063) but will be removed once bearer tokens (#9229) have been implemented. There is a security risk (CSRF) in enabling this flag! Do not use it in production! For more information, see https://guides.dataverse.org/en/5.14/installation/config.html#feature-flags
+- A feature flag called "api-bearer-auth" has been added. This allows OIDC useraccounts to send authenticated API requests using Bearer Tokens. Note: This feature is limited to OIDC! For more information, see https://guides.dataverse.org/en/5.14/installation/config.html#feature-flags (PR #9591)
+
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [5.14 milestone](https://github.com/IQSS/dataverse/milestone/108?closed=1) on GitHub.
diff --git a/doc/release-notes/6.0-release-notes.md b/doc/release-notes/6.0-release-notes.md
new file mode 100644
index 00000000000..858cd604bda
--- /dev/null
+++ b/doc/release-notes/6.0-release-notes.md
@@ -0,0 +1,316 @@
+# Dataverse 6.0
+
+This is a platform upgrade release. Payara, Solr, and Java have been upgraded. No features have been added to the Dataverse software itself. Only a handful of bugs were fixed.
+
+Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project!
+
+## Release Highlights (Major Upgrades, Breaking Changes)
+
+This release contains major upgrades to core components. Detailed upgrade instructions can be found below.
+
+### Runtime
+
+- The required Java version has been increased from version 11 to 17.
+    - See PR #9764 for details.
+- Payara application server has been upgraded to version 6.2023.8.
+    - This is a required update.
+    - Please note that Payara Community 5 has reached [end of life](https://www.payara.fish/products/payara-platform-product-lifecycle/).
+    - See PR #9685 and PR #9795 for details.
+- Solr has been upgraded to version 9.3.0.
+    - See PR #9787 for details.
+- PostgreSQL 13 remains the tested and supported version.
+    - That said, the installer and Flyway have been upgraded to support PostgreSQL 14 and 15. See the [PostgreSQL](https://guides.dataverse.org/en/6.0/installation/prerequisites.html#postgresql) section of the Installation Guide and PR #9877 for details.
+
+### Development
+
+- Removal of Vagrant and Docker All In One (docker-aio), deprecated in Dataverse v5.14. See PR #9838 and PR #9685 for details.
+- All tests have been migrated to use JUnit 5 exclusively from now on. See PR #9796 for details.
+
+## Installation
+
+If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it!
+
+Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club!
+
+You are also very welcome to join the [Global Dataverse Community Consortium](https://dataversecommunity.global) (GDCC).
+
+## Upgrade Instructions
+
+Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc.
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 5.14.
+
+### Upgrade from Java 11 to Java 17
+
+Java 17 is now required for Dataverse. Solr can run under Java 11 or Java 17 but the latter is recommended. In preparation for the Java upgrade, stop both Dataverse/Payara and Solr.
+
+1. Undeploy Dataverse, if deployed, using the unprivileged service account.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications`
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14`
+
+1. Stop Payara 5.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain`
+
+1. Stop Solr 8.
+
+   `sudo systemctl stop solr.service`
+
+1. Install Java 17.
+
+   Assuming you are using RHEL or a derivative such as Rocky Linux:
+
+   `sudo yum install java-17-openjdk`
+
+1. Set Java 17 as the default.
+
+   Assuming you are using RHEL or a derivative such as Rocky Linux:
+
+   `sudo alternatives --config java`
+
+1. Test that Java 17 is the default.
+
+   `java -version`
+
+### Upgrade from Payara 5 to Payara 6
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+1. Download Payara 6.2023.8.
+
+   `curl -L -O https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip`
+
+1. Unzip it to /usr/local (or your preferred location).
+
+   `sudo unzip payara-6.2023.8.zip -d /usr/local/`
+
+1. Change ownership of the unzipped Payara to your "service" user ("dataverse" by default).
+
+   `sudo chown -R dataverse /usr/local/payara6`
+
+1. Undeploy Dataverse, if deployed, using the unprivileged service account.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin list-applications`
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin undeploy dataverse-5.14`
+
+1. Stop Payara 5, if running.
+
+   `sudo -u dataverse /usr/local/payara5/bin/asadmin stop-domain`
+
+1. Copy Dataverse-related lines from Payara 5 to Payara 6 domain.xml.
+
+   `sudo -u dataverse cp /usr/local/payara6/glassfish/domains/domain1/config/domain.xml /usr/local/payara6/glassfish/domains/domain1/config/domain.xml.orig`
+
+   `sudo egrep 'dataverse|doi' /usr/local/payara5/glassfish/domains/domain1/config/domain.xml > lines.txt`
+
+   `sudo vi /usr/local/payara6/glassfish/domains/domain1/config/domain.xml`
+
+   The lines will appear in two sections, examples shown below (but your content will vary).
+
+   Section 1: system properties (under `<server name="server" config-ref="server-config">`)
+
+   ```
+   <system-property name="dataverse.db.user" value="dvnuser"></system-property>
+   <system-property name="dataverse.db.host" value="localhost"></system-property>
+   <system-property name="dataverse.db.port" value="5432"></system-property>
+   <system-property name="dataverse.db.name" value="dvndb"></system-property>
+   <system-property name="dataverse.db.password" value="dvnsecret"></system-property>
+   ```
+
+   Note: if you used the Dataverse installer, you won't have a `dataverse.db.password` property. See "Create password aliases" below.
+
+   Section 2: JVM options (under `<java-config classpath-suffix="" debug-options="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=9009" system-classpath="">`, the one under `<config name="server-config">`, not under `<config name="default-config">`)
+
+   ```
+   <jvm-options>-Ddataverse.files.directory=/usr/local/dvn/data</jvm-options>
+   <jvm-options>-Ddataverse.files.file.type=file</jvm-options>
+   <jvm-options>-Ddataverse.files.file.label=file</jvm-options>
+   <jvm-options>-Ddataverse.files.file.directory=/usr/local/dvn/data</jvm-options>
+   <jvm-options>-Ddataverse.rserve.host=localhost</jvm-options>
+   <jvm-options>-Ddataverse.rserve.port=6311</jvm-options>
+   <jvm-options>-Ddataverse.rserve.user=rserve</jvm-options>
+   <jvm-options>-Ddataverse.rserve.password=rserve</jvm-options>
+   <jvm-options>-Ddataverse.auth.password-reset-timeout-in-minutes=60</jvm-options>
+   <jvm-options>-Ddataverse.timerServer=true</jvm-options>
+   <jvm-options>-Ddataverse.fqdn=dev1.dataverse.org</jvm-options>
+   <jvm-options>-Ddataverse.siteUrl=https://dev1.dataverse.org</jvm-options>
+   <jvm-options>-Ddataverse.files.storage-driver-id=file</jvm-options>
+   <jvm-options>-Ddoi.username=testaccount</jvm-options>
+   <jvm-options>-Ddoi.password=notmypassword</jvm-options>
+   <jvm-options>-Ddoi.baseurlstring=https://mds.test.datacite.org/</jvm-options>
+   <jvm-options>-Ddoi.dataciterestapiurlstring=https://api.test.datacite.org</jvm-options>
+   ```
+
+1. Check the `Xmx` setting in `domain.xml`.
+
+   Under `/usr/local/payara6/glassfish/domains/domain1/config/domain.xml`, check the `Xmx` setting under `<config name="server-config">`, where you put the JVM options, not the one under `<config name="default-config">`. Note that there are two such settings, and you want to adjust the one in the stanza with Dataverse options. This sets the JVM heap size; a good rule of thumb is half of your system's total RAM. You may specify the value in MB (`8192m`) or GB (`8g`).
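+
+   For example, on a server with 16 GB of RAM, the relevant line might look like this (the value shown is only illustrative):
+
+   ```
+   <jvm-options>-Xmx8g</jvm-options>
+   ```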
+
+1. Copy `jhove.conf` and `jhoveConfig.xsd` from Payara 5, edit and change `payara5` to `payara6`.
+
+   `sudo cp /usr/local/payara5/glassfish/domains/domain1/config/jhove* /usr/local/payara6/glassfish/domains/domain1/config/`
+
+   `sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/config/jhove*`
+
+   `sudo -u dataverse vi /usr/local/payara6/glassfish/domains/domain1/config/jhove.conf`
+
+1. Copy logos from Payara 5 to Payara 6.
+
+   These logos are for collections (dataverses).
+
+   `sudo -u dataverse cp -r /usr/local/payara5/glassfish/domains/domain1/docroot/logos /usr/local/payara6/glassfish/domains/domain1/docroot`
+
+1. If you are using Make Data Count (MDC), edit :MDCLogPath.
+
+   Your `:MDCLogPath` database setting might be pointing to a Payara 5 directory such as `/usr/local/payara5/glassfish/domains/domain1/logs`. If so, edit this to be Payara 6. You'll probably want to copy your logs over as well.
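+
+   For example (adjust the host and path to your installation):
+
+   `curl -X PUT -d '/usr/local/payara6/glassfish/domains/domain1/logs/mdc' http://localhost:8080/api/admin/settings/:MDCLogPath`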
+
+1. If you've enabled access logging or made any other site-specific configuration changes, be sure to preserve them. For instance, the default domain.xml includes
+
+   ```
+        <http-service>
+        <access-log></access-log>
+   ```
+
+   but you may wish to include
+
+   ```
+        <http-service access-logging-enabled="true">
+        <access-log format="%client.name% %datetime% %request% %status% %response.length% %header.user-agent% %header.referer% %cookie.JSESSIONID% %header.x-forwarded-for%"></access-log>
+   ```
+
+   Be sure to keep a previous copy of your domain.xml for reference.
+
+1. Update systemd unit file (or other init system) from `/usr/local/payara5` to `/usr/local/payara6`, if applicable.
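+
+   For example, if your unit file is at `/etc/systemd/system/payara.service` (the file name may differ in your installation), you could update the path and reload systemd like this:
+
+   `sudo sed -i 's|/usr/local/payara5|/usr/local/payara6|g' /etc/systemd/system/payara.service`
+
+   `sudo systemctl daemon-reload`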
+
+1. Start Payara.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain`
+
+1. Create a Java mail resource, replacing "localhost" for mailhost with your mail relay server, and replacing "localhost" for fromaddress with the FQDN of your Dataverse server.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-javamail-resource --mailhost "localhost" --mailuser "dataversenotify" --fromaddress "do-not-reply@localhost" mail/notifyMailSession`
+
+1. Create password aliases for your database, rserve and datacite jvm-options, if you're using them.
+
+   `echo "AS_ADMIN_ALIASPASSWORD=yourDBpassword" > /tmp/dataverse.db.password.txt`
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-password-alias --passwordfile /tmp/dataverse.db.password.txt`
+
+   When you are prompted "Enter the value for the aliasname operand", enter `dataverse.db.password`
+
+   You should see "Command create-password-alias executed successfully."
+
+   You'll want to perform similar commands for `rserve_password_alias` and `doi_password_alias` if you're using Rserve and/or DataCite.
+
+1. Enable workaround for FISH-7722.
+
+   The following workaround is for https://github.com/payara/Payara/issues/6337
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED`
+
+1. Create the network listener on port 8009.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector`
+
+1. Deploy the Dataverse 6.0 war file.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin deploy /path/to/dataverse-6.0.war`
+
+1. Check that you get a version number from Dataverse.
+
+   This is just a sanity check that Dataverse has been deployed properly.
+
+   `curl http://localhost:8080/api/info/version`
+
+1. Perform one final Payara restart to ensure that timers are initialized properly.
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin stop-domain`
+
+   `sudo -u dataverse /usr/local/payara6/bin/asadmin start-domain`
+
+### Upgrade from Solr 8 to 9
+
+Solr has been upgraded to Solr 9. You must install Solr fresh and reindex. You cannot use your old `schema.xml` because the format has changed.
+
+The instructions below are copied from https://guides.dataverse.org/en/6.0/installation/prerequisites.html#installing-solr and tweaked a bit for an upgrade scenario.
+
+We assume that you already have a user called "solr" (from the instructions above), added during your initial installation of Solr. We also assume that you have already stopped Solr 8 as explained in the instructions above about upgrading Java.
+
+1. Become the "solr" user and then download and configure Solr.
+
+   `su - solr`
+
+   `cd /usr/local/solr`
+
+   `wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz`
+
+   `tar xvzf solr-9.3.0.tgz`
+
+   `cd solr-9.3.0`
+
+   `cp -r server/solr/configsets/_default server/solr/collection1`
+
+1. Unzip "dvinstall.zip" from this release. Unzip it into /tmp. Then copy the following files into place.
+
+   `cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf`
+
+   `cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf`
+
+1. A Dataverse installation requires a change to the jetty.xml file that ships with Solr.
+
+   Edit `/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`, increasing `requestHeaderSize` from `8192` to `102400`
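+
+   After the edit, the relevant line should look roughly like this (the surrounding file may differ slightly between Solr releases):
+
+   ```
+   <Set name="requestHeaderSize"><Property name="solr.jetty.request.header.size" default="102400" /></Set>
+   ```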
+
+1. Tell Solr to create the core "collection1" on startup.
+
+   `echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties`
+
+1. Update your init script.
+
+   Your init script may be located at `/etc/systemd/system/solr.service`, for example. Update the path to Solr to be `/usr/local/solr/solr-9.3.0`.
+
+1. Start Solr using your init script and check collection1.
+
+   The collection1 check below should print out fields Dataverse uses like "dsDescription".
+
+   `systemctl start solr.service`
+
+   `curl http://localhost:8983/solr/collection1/schema/fields`
+
+1. Reindex Solr.
+
+   For details, see https://guides.dataverse.org/en/6.0/admin/solr-search-index.html but here is the reindex command:
+
+   `curl http://localhost:8080/api/admin/index`
+
+1. If you have custom metadata blocks installed, you must update your Solr `schema.xml` to include your custom fields.
+
+   For details, please see https://guides.dataverse.org/en/6.0/admin/metadatacustomization.html#updating-the-solr-schema
+
+   At a high level you will be copying custom fields from the output of http://localhost:8080/api/admin/index/solr/schema or using a script to automate this.
+
+## Potential Archiver Incompatibilities with Payara 6
+
+The [Google Cloud and DuraCloud archivers](https://guides.dataverse.org/en/5.14/installation/config.html#bagit-export) may not work in Dataverse 6.0.
+
+This is due to the archivers' dependence on libraries that include classes in `javax.*` packages that are no longer available. If these classes are actually used when the archivers run, the archivers will fail. As these two archivers require additional setup, they have not been tested in 6.0. Community members using these archivers or considering their use are encouraged to test them with 6.0 and report any errors and/or provide fixes for them that can be included in future releases.
+
+## Bug Fix for Dataset Templates with Custom Terms of Use
+
+A bug was fixed for the following scenario:
+
+- Create a template with custom terms.
+- Set that template as the default.
+- Try to create a dataset.
+- A 500 error appears before the form to create a dataset is even shown.
+
+For more details, see issue #9825 and PR #9892.
+
+## Complete List of Changes
+
+For the complete list of code changes in this release, see the [6.0 Milestone](https://github.com/IQSS/dataverse/milestone/109?closed=1) in GitHub.
+
+## Getting Help
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
diff --git a/doc/release-notes/6.1-release-notes.md b/doc/release-notes/6.1-release-notes.md
new file mode 100644
index 00000000000..1279d09a023
--- /dev/null
+++ b/doc/release-notes/6.1-release-notes.md
@@ -0,0 +1,322 @@
+# Dataverse 6.1
+
+Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.1 rather than the list of releases, which will cut them off.
+
+This release brings new features, enhancements, and bug fixes to the Dataverse software.
+Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project.
+
+## Release highlights
+
+### Guestbook at request
+
+Dataverse can now be configured (via the `dataverse.files.guestbook-at-request` option) to display any configured guestbook to users when they request restricted files (new functionality) or when they download files (previous behavior).
+
+The global default defined by this setting can be overridden at the collection level on the collection page and at the individual dataset level by a superuser using the API. The default, showing guestbooks when files are downloaded, remains as it was in prior Dataverse versions.
+
+For details, see [dataverse.files.guestbook-at-request](https://guides.dataverse.org/en/6.1/installation/config.html#dataverse-files-guestbook-at-request) and PR #9599.
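+
+As a sketch, the global default could be set as a JVM option (here, showing guestbooks at access request time):
+
+```
+./asadmin create-jvm-options '-Ddataverse.files.guestbook-at-request=true'
+```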
+
+### Collection-level storage quotas
+
+This release adds support for defining storage size quotas for collections. Please see the API guide for details. This is an experimental feature that has not yet been used in production on any real life Dataverse instance, but we are planning to try it out at Harvard/IQSS.
+
+Please note that this release includes a database update (via a Flyway script) that will calculate the storage sizes of all the existing datasets and collections on the first deployment. On a large production database with tens of thousands of datasets this may add a couple of extra minutes to the initial deployment of Dataverse 6.1.
+
+For details, see [Storage Quotas for Collections](https://guides.dataverse.org/en/6.1/admin/collectionquotas.html) in the Admin Guide.
+
+### Globus support (experimental), continued
+
+Globus support in Dataverse has been expanded to include support for using file-based Globus endpoints, including the case where files are stored on tape and are not immediately accessible, as well as for referencing files stored on remote Globus endpoints. Support for using the Globus S3 Connector with an S3 store has been retained but requires changes to the Dataverse configuration. Please note:
+
+- Globus functionality remains experimental/advanced in that it requires significant setup, differs in multiple ways from other file storage mechanisms, and may continue to evolve with the potential for backward incompatibilities.
+- The functionality is configured per store and replaces the previous single-S3-Connector-per-Dataverse-instance model.
+- Adding files to a dataset and accessing files are supported via the Dataverse user interface through a separate [dataverse-globus app](https://github.com/scholarsportal/dataverse-globus).
+- The functionality is also accessible via APIs (combining calls to the Dataverse and Globus APIs).
+
+Backward incompatibilities:
+- The configuration for use of a Globus S3 Connector has changed and is aligned with the standard store configuration mechanism.
+- The new functionality is incompatible with older versions of the dataverse-globus app, and the Globus-related functionality in the UI will only function correctly if a Dataverse 6.1-compatible version of the dataverse-globus app is configured.
+
+New JVM options:
+- A new "globus" store type and associated store-related options have been added. These are described in the [File Storage](https://guides.dataverse.org/en/6.1/installation/config.html#file-storage) section of the Installation Guide.
+- dataverse.files.globus-cache-maxage - specifies the number of minutes Dataverse will wait between when an initial request for a file transfer occurs and when that transfer must begin.
+
+Obsolete settings: the :GlobusBasicToken, :GlobusEndpoint, and :GlobusStores settings are no longer used.
+
+Further details can be found in the [Big Data Support](https://guides.dataverse.org/en/6.1/developers/big-data-support.html#big-data-support) section of the Developer Guide.
+
+### Alternative Title now allows multiple values
+
+Alternative Title now allows multiple values. Note that JSON used to create a dataset with an Alternative Title must be changed. See "Backward incompatibilities" below and PR #9440 for details.
+
+### External tools: configure tools now available at the dataset level
+
+Read/write "configure" tools (a type of external tool) are now available at the dataset level. They appear under the "Edit Dataset" menu. See [External Tools](https://guides.dataverse.org/en/6.1/admin/external-tools.html#dataset-level-configure-tools) in the Admin Guide and PR #9925.
+
+### S3 out-of-band upload
+
+In some situations, direct upload might not work from the UI, e.g., when S3 storage is not accessible from the internet. This release adds an option to [allow direct uploads via API only](https://github.com/IQSS/dataverse/pull/9003). This way, a third party application can use direct upload from within the internal network, while there is no direct upload available to the users via the UI.
+By default, Dataverse supports uploading files via the [add a file to a dataset](https://guides.dataverse.org/en/6.1/api/native-api.html#add-a-file-to-a-dataset) API. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server).
+With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the [Adding the Uploaded file to the Dataset](https://guides.dataverse.org/en/6.1/developers/s3-direct-upload-api.html#adding-the-uploaded-file-to-the-dataset) API call (described in the [Direct DataFile Upload/Replace API](https://guides.dataverse.org/en/6.1/developers/s3-direct-upload-api.html) page) used to add metadata and inform Dataverse that a new file has been added to the relevant store.
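+
+For example (a sketch, assuming an S3 store with the driver id "s3"), the out-of-band option could be enabled with:
+
+```
+./asadmin create-jvm-options '-Ddataverse.files.s3.upload-out-of-band=true'
+```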
+
+### JSON Schema for datasets
+
+Functionality has been added to help validate dataset JSON prior to dataset creation. There are two new API endpoints in this release. The first takes in a collection alias and returns a custom dataset schema based on the required fields of the collection. The second takes in a collection alias and a dataset JSON file and does an automated validation of the JSON file against the custom schema for the collection. In this release functionality is limited to JSON format validation and validating required elements. Future releases will address field types, controlled vocabulary, etc. See [Retrieve a Dataset JSON Schema for a Collection](https://guides.dataverse.org/en/6.1/api/native-api.html#retrieve-a-dataset-json-schema-for-a-collection) in the API Guide and PR #10109.
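+
+A minimal sketch of the two calls (the collection alias and file name are placeholders):
+
+```
+export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+export SERVER_URL=https://demo.dataverse.org
+
+# retrieve the custom dataset schema for a collection
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/datasetSchema"
+
+# validate a dataset JSON file against that collection's schema
+curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" \
+  -X POST "$SERVER_URL/api/dataverses/root/validateDatasetJson" \
+  --upload-file dataset.json
+```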
+
+### OpenID Connect (OIDC) improvements
+
+#### Using MicroProfile Config for provisioning
+
+With this release it is possible to provision a single OIDC-based authentication provider by using MicroProfile Config instead of or in addition to the classic Admin API provisioning.
+
+If you are using an external OIDC provider component as an identity management system and/or broker to other authentication providers such as Google, eduGain SAML and so on, this might make your life easier during instance setups and reconfiguration. You no longer need to generate the necessary JSON file.
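+
+As an illustrative sketch (option names are taken from the "New configuration options" list below; values are placeholders, and colons must be escaped for `asadmin`), the provider could be provisioned with JVM options like:
+
+```
+./asadmin create-jvm-options '-Ddataverse.auth.oidc.enabled=true'
+./asadmin create-jvm-options '-Ddataverse.auth.oidc.client-id=dataverse'
+./asadmin create-jvm-options '-Ddataverse.auth.oidc.client-secret=CHANGE_ME'
+./asadmin create-jvm-options '-Ddataverse.auth.oidc.auth-server-url=https\://keycloak.example.edu/realms/test'
+```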
+
+#### Adding PKCE Support
+
+Some OIDC providers require using PKCE as an additional security layer. As of this version, you can enable support for this on any OIDC provider you configure. (Note that OAuth2 providers have not been upgraded.)
+
+For both features, see the [OIDC](https://guides.dataverse.org/en/6.0/installation/oidc.html) section of the Installation Guide and PR #9273.
+
+### Solr improvements
+
+As of this release, application-side support has been added for the "circuit breaker" mechanism in Solr that makes it drop requests more gracefully when the search engine is experiencing load issues.
+
+Please see the [Installing Solr](https://guides.dataverse.org/en/6.1/installation/prerequisites.html#installing-solr) section of the Installation Guide.
+
+### New release of Dataverse Previewers (including a Markdown previewer)
+
+Version 1.4 of the standard Dataverse Previewers from https://github.com/gdcc/dataverse-previewers is available. The new version supports the use of signedUrls rather than API keys when previewing restricted files (including files in draft dataset versions). Upgrading is highly recommended. Please note:
+
+- SignedUrls can now be used with PrivateUrl access tokens, which allows PrivateUrl users to view previewers that are configured to use SignedUrls. See #10093.
+- Launching a dataset-level configuration tool will automatically generate an API token when needed. This is consistent with how other types of tools work. See #10045.
+- There is now a [Markdown (.md)](https://guides.dataverse.org/en/6.1/user/dataset-management.html#file-previews) previewer.
+
+### New or improved APIs
+
+The development of a [new UI for Dataverse](https://github.com/IQSS/dataverse-frontend) is driving the addition or improvement of many APIs.
+
+#### New API endpoints
+
+- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession): version deaccessioning through API (Given a dataset and a version).
+- /api/files/{id}/downloadCount
+- /api/files/{id}/dataTables 
+- /api/files/{id}/metadata/tabularTags New endpoint to set tabular file tags.
+- canManageFilePermissions (/api/access/datafile/{id}/userPermissions): Added for getting user permissions on a file.
+- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Given a dataset and its version, retrieves file counts based on different criteria (Total count, per content type, per access status and per category name).
+- setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created.
+- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file.
+- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Returns whether a particular file that existed in a previous version of the dataset no longer exists in the latest version.
+- getZipDownloadLimit (/api/info/zipDownloadLimit): Get the configured zip file download limit. The response contains the long value of the limit in bytes.
+- getMaxEmbargoDurationInMonths (/api/info/settings/:MaxEmbargoDurationInMonths): Get the maximum embargo duration in months, if available, configured through the database setting :MaxEmbargoDurationInMonths.
+- getDatasetJsonSchema (/api/dataverses/{id}/datasetSchema): Get a dataset schema with the fields required by a given dataverse collection.
+- validateDatasetJsonSchema (/api/dataverses/{id}/validateDatasetJson): Validate that a dataset JSON file is in proper format and contains the required elements and fields for a given dataverse collection.
+- downloadTmpFile (/api/admin/downloadTmpFile): For testing purposes, allows files to be downloaded from /tmp.
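+
+As an illustrative example (the file id is a placeholder, and `$SERVER_URL` and `$API_TOKEN` are assumed to be set), the new download count endpoint could be called like this:
+
+```
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/42/downloadCount"
+```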
+
+#### Pagination of files in dataset versions
+
+- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions
+- a new flag `includeFiles` is added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output
+- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files.
+
+This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/6.1/api/native-api.html#dataset-versions-api) section of the Guide.
+
+
+#### DataFile API payload has been extended to include the following fields
+
+- tabularData: Boolean field to know if the DataFile is of tabular type
+- fileAccessRequest: Boolean field to know if the file access requests are enabled on the Dataset (DataFile owner)
+- friendlyType: String
+
+#### The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support pagination, ordering, and optional filtering
+
+- Access status: through the `accessStatus` query parameter, which supports the following values:
+    - Public
+    - Restricted
+    - EmbargoedThenRestricted
+    - EmbargoedThenPublic
+- Category name: through the `categoryName` query parameter. To return files to which the particular category has been added.
+- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png".
+
+#### Additional improvements to existing API endpoints
+
+- getVersionFiles (/api/datasets/{id}/versions/{versionId}/files): Extended to support optional filtering by search text through the `searchText` query parameter. The search will be applied to the labels and descriptions of the dataset files. Added `tabularTagName` to return files to which the particular tabular tag has been added. Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files.
+- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain file counts. Added support for filtering by optional criteria query parameter:
+    - contentType
+    - accessStatus
+    - categoryName
+    - tabularTagName
+    - searchText
+- getDownloadSize (/api/datasets/{identifier}/versions/{versionId}/downloadsize): Added optional boolean query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files. Added a new optional query parameter "mode", which applies a filter criterion to the operation and supports the following values:
+    - All (Default): Includes both archival and original sizes for tabular files
+    - Archival: Includes only the archival size for tabular files
+    - Original: Includes only the original size for tabular files.
+- /api/datasets/{id}/versions/{versionId} New query parameter `includeDeaccessioned` added to consider deaccessioned versions when searching for versions.
+- /api/datasets/{id}/userPermissions Get user permissions on a dataset, in particular, the user permissions that this API call checks, returned as booleans, are the following:
+    - Can view the unpublished dataset
+    - Can edit the dataset
+    - Can publish the dataset
+    - Can manage the dataset permissions
+    - Can delete the dataset draft
+- getDatasetVersionCitation (/api/datasets/{id}/versions/{versionId}/citation) endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain the citation.
+
+### Improvements for developers
+
+- Developers can enjoy a dramatically faster feedback loop when iterating on code if they are using Netbeans or IntelliJ IDEA Ultimate (with the Payara Platform Tools plugin). For details, see https://guides.dataverse.org/en/6.1/container/dev-usage.html#intellij-idea-ultimate-and-payara-platform-tools and [the thread](https://groups.google.com/g/dataverse-community/c/zNBDzSMF2Q0/m/Z-xS6fA2BgAJ) on the mailing list.
+- Developers can now test S3 locally by using the Dockerized development environment, which now includes both LocalStack and MinIO. API (end to end) tests are in S3AccessIT.
+- In addition, a new integration test class (not an API test, the new Testcontainers-based test launched with `mvn verify`) has been added at S3AccessIOLocalstackIT. It uses Testcontainers to spin up Localstack for S3 testing and does not require Dataverse to be running.
+- With this release, we add a new type of testing to Dataverse: integration tests which are not end-to-end tests (like our API tests). Starting with OIDC authentication support, we now test regularly on CI that both OIDC login options (UI and API) are in working condition.
+- The testing and development Keycloak realm has been updated with more users and compatibility with Keycloak 21.
+- The support for setting JVM options during testing has been improved for developers. You now may add the `@JvmSetting` annotation to classes (also inner classes) and reference factory methods for values. This improvement is also paving the way to enable manipulating JVM options during end-to-end tests on remote ends.
+- As part of these testing improvements, the code coverage report file for unit tests has moved from `target/jacoco.exec` to `target/coverage-reports/jacoco-unit.exec`.
+
+## Major use cases and infrastructure enhancements
+
+Changes and fixes in this release not already mentioned above include:
+
+- Validation has been added for the Geographic Bounding Box values in the Geospatial metadata block. This will prevent improperly defined bounding boxes from being created via the edit page or metadata imports. This also fixes the issue where existing datasets with invalid geoboxes were quietly failing to get reindexed. See PR #10142.
+- Dataverse's OAI_ORE Metadata Export format and archival BagIT exports
+(which include the OAI-ORE metadata export file) have been updated to include
+information about the dataset version state, e.g. RELEASED or DEACCESSIONED
+and to indicate which version of Dataverse was used to create the archival Bag.
+As part of the latter, the current OAI_ORE Metadata format has been given a 1.0.0
+version designation and it is expected that any future changes to the OAI_ORE export
+format will result in a version change and that tools such as DVUploader that can
+recreate datasets from archival Bags will start indicating which version(s) of the
+OAI_ORE format they can read.
+Dataverse installations that have been using archival Bags may wish to update any
+existing archival Bags they have, e.g. by deleting existing Bags and using the Dataverse
+[archival Bag export API](https://guides.dataverse.org/en/6.1/installation/config.html#bagit-export-api-calls)
+to generate updated versions.
+- For BagIT export, it is now possible to configure the following information in bag-info.txt. (Previously, customization was possible by editing `Bundle.properties` but this is no longer supported.) For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt
+  - Source-Organization from `dataverse.bagit.sourceorg.name`.
+  - Organization-Address from `dataverse.bagit.sourceorg.address`.
+  - Organization-Email from `dataverse.bagit.sourceorg.email`.
+- This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward-incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this clean up and not support the old implementation that was not fully compliant with the spec.
+  - To fix #9952, we surround the license info with `<` and `>`.
+  - To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information
+  - To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now.
+- It's now possible to configure the docroot, which holds collection logos and more. See [dataverse.files.docroot](https://guides.dataverse.org/en/6.1/installation/config.html#dataverse-files-docroot) in the Installation Guide and PR #9819. 
+- We have started maintaining an API changelog of breaking changes: https://guides.dataverse.org/en/6.1/api/changelog.html
+See also #10060.
+
+## New configuration options
+
+- dataverse.auth.oidc.auth-server-url
+- dataverse.auth.oidc.client-id
+- dataverse.auth.oidc.client-secret
+- dataverse.auth.oidc.enabled
+- dataverse.auth.oidc.pkce.enabled
+- dataverse.auth.oidc.pkce.max-cache-age
+- dataverse.auth.oidc.pkce.max-cache-size
+- dataverse.auth.oidc.pkce.method
+- dataverse.auth.oidc.subtitle
+- dataverse.auth.oidc.title
+- dataverse.bagit.sourceorg.address
+- dataverse.bagit.sourceorg.email
+- dataverse.bagit.sourceorg.name
+- dataverse.files.docroot
+- dataverse.files.globus-cache-maxage
+- dataverse.files.guestbook-at-request
+- dataverse.files.{driverId}.upload-out-of-band
+
+## Backward incompatibilities
+
+- Since Alternative Title is now repeatable, the JSON you send to create or edit a dataset must be an array rather than a simple string. For example, instead of "value": "Alternative Title", you must send "value": ["Alternative Title1", "Alternative Title2"]
+- Several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification introduce backward-incompatibility. See above for details.
+- For BagIT export, if you were configuring values in bag-info.txt using `Bundle.properties`, you must switch to the new `dataverse.bagit` JVM options mentioned above. For details, see https://guides.dataverse.org/en/6.1/installation/config.html#bag-info-txt
+- See "Globus support" above for backward incompatibilies specific to Globus.
+
+## Complete list of changes
+
+For the complete list of code changes in this release, see the [6.1 Milestone](https://github.com/IQSS/dataverse/milestone/110?closed=1) in GitHub.
+
+## Getting help
+
+For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/forum/#!forum/dataverse-community) or email support@dataverse.org.
+
+## Installation
+
+If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it!
+
+Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club!
+
+You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC).
+
+## Upgrade instructions
+Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc.
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.0.
+
+0\. These instructions assume that you are upgrading from 6.0. If you are running an earlier version, the only safe way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to 6.1.
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+In the following commands we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed.
+
+`export PAYARA=/usr/local/payara6`
+
+(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell)
+
+1\. Undeploy the previous version.
+
+- `$PAYARA/bin/asadmin undeploy dataverse-6.0`
+
+2\. Stop Payara and remove the generated directory
+
+- `service payara stop`
+- `rm -rf $PAYARA/glassfish/domains/domain1/generated`
+
+3\. Start Payara
+
+- `service payara start`
+
+4\. Deploy this version.
+
+- `$PAYARA/bin/asadmin deploy dataverse-6.1.war`
+
+As noted above, deployment of the war file might take several minutes due to a database migration script required for the new storage quotas feature.
+
+5\. Restart Payara
+
+- `service payara stop`
+- `service payara start`
+
+6\. Update Geospatial Metadata Block (to improve validation of bounding box values)
+
+- `wget https://github.com/IQSS/dataverse/releases/download/v6.1/geospatial.tsv`
+- `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file geospatial.tsv`
+
+6a\. Update Citation Metadata Block (to make Alternative Title repeatable)
+
+- `curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file scripts/api/data/metadatablocks/citation.tsv`
+
+7\. Update Solr schema.xml to allow multiple Alternative Titles to be used. See the specific instructions below for installations without custom metadata blocks (7a) and those with custom metadata blocks (7b).
+
+7a\. For installations without custom or experimental metadata blocks:
+
+- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script))
+
+- Replace schema.xml
+
+  - `cp /tmp/dvinstall/schema.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf`
+
+- Start Solr instance (usually `service solr start`, depending on Solr/OS)
+
+7b\. For installations with custom or experimental metadata blocks:
+
+- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/5.14/installation/prerequisites.html#solr-init-script))
+
+- There are 2 ways to regenerate the schema: Either by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed):
+```
+	wget https://raw.githubusercontent.com/IQSS/dataverse/master/conf/solr/9.3.0/update-fields.sh
+	chmod +x update-fields.sh
+	curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.3.0/server/solr/collection1/conf/schema.xml
+```
+Or, alternatively, you can edit the following line in your schema.xml by hand (to indicate that Alternative Title is now `multiValued="true"`):
+```
+     <field name="alternativeTitle" type="text_en" multiValued="true" stored="true" indexed="true"/>
+```
+     
+- Restart Solr instance (usually `service solr restart` depending on solr/OS)
+
+8\. Run ReExportAll to update dataset metadata exports. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/5.14/admin/metadataexport.html#batch-exports-through-the-api).
diff --git a/doc/shib/shib.md b/doc/shib/shib.md
index 2c178a93f35..9cff6d827e7 100644
--- a/doc/shib/shib.md
+++ b/doc/shib/shib.md
@@ -82,11 +82,7 @@ Run `service httpd restart`.
 
 ## Update/verify files under /etc/shibboleth
 
-For /etc/shibboleth/shibboleth2.xml use the version from https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/shibboleth2.xml but replace "pdurbin.pagekite.me" with the "shibtest.dataverse.org".
-
-Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/dataverse-idp-metadata.xml at /etc/shibboleth/dataverse-idp-metadata.xml
-
-Put https://github.com/IQSS/dataverse/blob/master/conf/vagrant/etc/shibboleth/attribute-map.xml at 
+Get files from the Installation Guide.
 
 After making these changes, run `service shibd restart` and `service httpd restart`.
 
diff --git a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
index 4f338905751..26144544d9e 100644
--- a/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
+++ b/doc/sphinx-guides/source/_static/admin/counter-processor-config.yaml
@@ -1,8 +1,8 @@
 # currently no other option but to have daily logs and have year-month-day format in the name with
 # 4-digit year and 2-digit month and day
-# /usr/local/payara5/glassfish/domains/domain1/logs/counter_2019-01-11.log
+# /usr/local/payara6/glassfish/domains/domain1/logs/counter_2019-01-11.log
 #log_name_pattern: sample_logs/counter_(yyyy-mm-dd).log
-log_name_pattern: /usr/local/payara5/glassfish/domains/domain1/logs/mdc/counter_(yyyy-mm-dd).log
+log_name_pattern: /usr/local/payara6/glassfish/domains/domain1/logs/mdc/counter_(yyyy-mm-dd).log
 
 # path_types regular expressions allow matching to classify page urls as either an investigation or request
 # based on specific URL structure for your system.
diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
index b07ea8c4fd1..4f4c29d0670 100644
--- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
+++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv
@@ -1,6 +1,7 @@
-Tool	Type	Scope	Description
-Data Explorer	explore	file	A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse. 
-Whole Tale	explore	dataset	A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide <https://wholetale.readthedocs.io/en/stable/users_guide/integration.html>`_.
-Binder	explore	dataset	Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions <https://github.com/data-exp-lab/girder_ythub/issues/10>`_ are in the Data Exploration Lab girder_ythub project.
-File Previewers	explore	file	A set of tools that display the content of files - including audio, html, `Hypothes.is <https://hypothes.is/>`_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers
-Data Curation Tool	configure	file	A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions.
+Tool	Type	Scope	Description
+Data Explorer	explore	file	"A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse."
+Whole Tale	explore	dataset	"A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide <https://wholetale.readthedocs.io/en/stable/users_guide/integration.html>`_."
+Binder	explore	dataset	Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions <https://github.com/data-exp-lab/girder_ythub/issues/10>`_ are in the Data Exploration Lab girder_ythub project.
+File Previewers	explore	file	"A set of tools that display the content of files - including audio, html, `Hypothes.is <https://hypothes.is/>`_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreadsheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers"
+Data Curation Tool	configure	file	"A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions."
+Ask the Data	query	file	Ask the Data is an experimental tool that allows you to ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation.
diff --git a/doc/sphinx-guides/source/_static/api/dataset-schema.json b/doc/sphinx-guides/source/_static/api/dataset-schema.json
new file mode 100644
index 00000000000..34b8a1eeedb
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/api/dataset-schema.json
@@ -0,0 +1,122 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "$defs": {
+    "field": {
+        "type": "object",
+        "required": ["typeClass", "multiple", "typeName"],
+        "properties": {
+            "value": {
+                "anyOf": [
+                    {
+                        "type": "array"
+                    },
+                    {
+                        "type": "string"
+                    },
+                    {
+                        "$ref": "#/$defs/field"
+                    }
+                ]
+            },
+            "typeClass": {
+                "type": "string"
+            },
+            "multiple": {
+                "type": "boolean"
+            },
+            "typeName": {
+                "type": "string"
+            }
+        }
+    }
+},
+"type": "object",
+"properties": {
+    "datasetVersion": {
+        "type": "object",
+        "properties": {
+           "license": {
+                "type": "object",
+                "properties": {
+                    "name": {
+                        "type": "string"
+                    },
+                    "uri": {
+                        "type": "string",
+                        "format": "uri"
+                   }
+                },
+                "required": ["name", "uri"]
+            },
+            "metadataBlocks": {
+                "type": "object",
+               "properties": {
+                           "citation": {
+                            "type": "object",
+                            "properties": {
+                                "fields": {
+                                    "type": "array",
+                                    "items": {
+                                        "$ref": "#/$defs/field"
+                                    },
+                                    "minItems": 5,
+                                    "allOf": [
+                                        {
+                                            "contains": {
+                                                "properties": {
+                                                    "typeName": {
+                                                        "const": "title"
+                                                    }
+                                                }
+                                            }
+                                        },
+                                        {
+                                            "contains": {
+                                                "properties": {
+                                                    "typeName": {
+                                                        "const": "author"
+                                                    }
+                                                }
+                                            }
+                                        },
+                                        {
+                                            "contains": {
+                                                "properties": {
+                                                    "typeName": {
+                                                        "const": "datasetContact"
+                                                    }
+                                                }
+                                            }
+                                        },
+                                        {
+                                            "contains": {
+                                                "properties": {
+                                                    "typeName": {
+                                                        "const": "dsDescription"
+                                                    }
+                                                }
+                                            }
+                                        },
+                                        {
+                                            "contains": {
+                                                "properties": {
+                                                    "typeName": {
+                                                        "const": "subject"
+                                                    }
+                                                }
+                                            }
+                                        }
+                                    ]
+                                }
+                            },
+                            "required": ["fields"]
+                        }
+                     },
+                    "required": ["citation"]
+                }
+            },
+            "required": ["metadataBlocks"]
+        }
+    },
+    "required": ["datasetVersion"]
+}
diff --git a/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json b/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json
index 6e499d4e164..dcb3e136907 100644
--- a/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json
+++ b/doc/sphinx-guides/source/_static/api/dataset-update-metadata.json
@@ -1,4 +1,8 @@
 {
+  "license": {
+    "name": "CC0 1.0",
+    "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+  },
   "metadataBlocks": {
     "citation": {
       "displayName": "Citation Metadata",
diff --git a/doc/sphinx-guides/source/_static/api/ddi_dataset.xml b/doc/sphinx-guides/source/_static/api/ddi_dataset.xml
index 679f82a3d8a..3b155fc7e55 100644
--- a/doc/sphinx-guides/source/_static/api/ddi_dataset.xml
+++ b/doc/sphinx-guides/source/_static/api/ddi_dataset.xml
@@ -52,8 +52,12 @@
         <depDate>1002-01-01</depDate>
       </distStmt>
       <serStmt>
-        <serName>SeriesName</serName>
-        <serInfo>SeriesInformation</serInfo>
+        <serName>SeriesName One</serName>
+        <serInfo>SeriesInformation One</serInfo>
+      </serStmt>
+      <serStmt>
+        <serName>SeriesName Two</serName>
+        <serInfo>SeriesInformation Two</serInfo>
       </serStmt>
     </citation>
     <stdyInfo>
diff --git a/scripts/vagrant/counter-processor-config.yaml b/doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml
similarity index 100%
rename from scripts/vagrant/counter-processor-config.yaml
rename to doc/sphinx-guides/source/_static/developers/counter-processor-config.yaml
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root
index 1de94331523..b9ef9960318 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.root
@@ -4,7 +4,7 @@
 
 set -e
 
-ASADMIN=/usr/local/payara5/bin/asadmin
+ASADMIN=/usr/local/payara6/bin/asadmin
 
 case "$1" in
   start)
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service
index 7c457e615d8..19bb190e740 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/payara.init.service
@@ -3,7 +3,7 @@
 # description: Payara App Server
 set -e
 
-ASADMIN=/usr/local/payara5/bin/asadmin
+ASADMIN=/usr/local/payara6/bin/asadmin
 APP_SERVER_USER=dataverse
 
 case "$1" in
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
index 7ca04cdff3f..f7dba504e70 100755
--- a/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr
@@ -5,9 +5,9 @@
 # chkconfig: 35 92 08
 # description: Starts and stops Apache Solr
 
-SOLR_DIR="/usr/local/solr/solr-8.11.1"
+SOLR_DIR="/usr/local/solr/solr-9.3.0"
 SOLR_COMMAND="bin/solr"
-SOLR_ARGS="-m 1g -j jetty.host=127.0.0.1"
+SOLR_ARGS="-m 1g"
 SOLR_USER=solr
 
 case $1 in
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service
index c8c82f6d6b2..c8efcb9c6f9 100644
--- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/payara.service
@@ -4,9 +4,9 @@ After = syslog.target network.target
 
 [Service]
 Type = forking
-ExecStart = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar start-domain
-ExecStop = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar stop-domain
-ExecReload = /usr/bin/java -jar /usr/local/payara5/glassfish/lib/client/appserver-cli.jar restart-domain
+ExecStart = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar start-domain
+ExecStop = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar stop-domain
+ExecReload = /usr/bin/java -jar /usr/local/payara6/glassfish/lib/client/appserver-cli.jar restart-domain
 User=dataverse
 LimitNOFILE=32768
 Environment="LANG=en_US.UTF-8"
diff --git a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
index d89ee108377..2ceeb0016d6 100644
--- a/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
+++ b/doc/sphinx-guides/source/_static/installation/files/etc/systemd/solr.service
@@ -5,9 +5,9 @@ After = syslog.target network.target remote-fs.target nss-lookup.target
 [Service]
 User = solr
 Type = forking
-WorkingDirectory = /usr/local/solr/solr-8.11.1
-ExecStart = /usr/local/solr/solr-8.11.1/bin/solr start -m 1g -j "jetty.host=127.0.0.1"
-ExecStop = /usr/local/solr/solr-8.11.1/bin/solr stop
+WorkingDirectory = /usr/local/solr/solr-9.3.0
+ExecStart = /usr/local/solr/solr-9.3.0/bin/solr start -m 1g
+ExecStop = /usr/local/solr/solr-9.3.0/bin/solr stop
 LimitNOFILE=65000
 LimitNPROC=65000
 Restart=on-failure
diff --git a/doc/sphinx-guides/source/_static/installation/files/root/auth-providers/oidc.json b/doc/sphinx-guides/source/_static/installation/files/root/auth-providers/oidc.json
new file mode 100644
index 00000000000..9df38988a25
--- /dev/null
+++ b/doc/sphinx-guides/source/_static/installation/files/root/auth-providers/oidc.json
@@ -0,0 +1,8 @@
+{
+    "id":"<a unique id>",
+    "factoryAlias":"oidc",
+    "title":"<a title - shown in UI>",
+    "subtitle":"<a subtitle - currently unused in UI>",
+    "factoryData":"type: oidc | issuer: <issuer url> | clientId: <client id> | clientSecret: <client secret> | pkceEnabled: <true/false> | pkceMethod: <PLAIN/S256/...>",
+    "enabled":true
+}
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json
index 47413c8a625..22dd6477cb4 100644
--- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json
+++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/dynamicDatasetTool.json
@@ -14,14 +14,14 @@
       {
         "locale":"{localeCode}"
       }
-    ],
-    "allowedApiCalls": [
-        {
-          "name":"retrieveDatasetJson",
-          "httpMethod":"GET",
-          "urlTemplate":"/api/v1/datasets/{datasetId}",
-          "timeOut":10
-        }
-      ]
-  }
+    ]
+  },
+  "allowedApiCalls": [
+    {
+      "name":"retrieveDatasetJson",
+      "httpMethod":"GET",
+      "urlTemplate":"/api/v1/datasets/{datasetId}",
+      "timeOut":10
+    }
+  ]
 }
diff --git a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json
index 1c132576099..2b6a0b8e092 100644
--- a/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json
+++ b/doc/sphinx-guides/source/_static/installation/files/root/external-tools/fabulousFileTool.json
@@ -21,14 +21,14 @@
       {
         "locale":"{localeCode}"
       }
-    ],
-    "allowedApiCalls": [
-      {
-        "name":"retrieveDataFile",
-        "httpMethod":"GET",
-        "urlTemplate":"/api/v1/access/datafile/{fileId}",
-        "timeOut":270
-      }
     ]
-  }
+  },
+  "allowedApiCalls": [
+    {
+      "name":"retrieveDataFile",
+      "httpMethod":"GET",
+      "urlTemplate":"/api/v1/access/datafile/{fileId}",
+      "timeOut":270
+    }
+  ]
 }
diff --git a/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties b/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties
deleted file mode 100644
index 4054c794452..00000000000
--- a/doc/sphinx-guides/source/_static/installation/files/usr/local/payara5/glassfish/domains/domain1/config/logging.properties
+++ /dev/null
@@ -1,166 +0,0 @@
-#
-# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
-#
-# Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
-#
-# The contents of this file are subject to the terms of either the GNU
-# General Public License Version 2 only ("GPL") or the Common Development
-# and Distribution License("CDDL") (collectively, the "License").  You
-# may not use this file except in compliance with the License.  You can
-# obtain a copy of the License at
-# https://glassfish.dev.java.net/public/CDDL+GPL_1_1.html
-# or packager/legal/LICENSE.txt.  See the License for the specific
-# language governing permissions and limitations under the License.
-#
-# When distributing the software, include this License Header Notice in each
-# file and include the License file at packager/legal/LICENSE.txt.
-#
-# GPL Classpath Exception:
-# Oracle designates this particular file as subject to the "Classpath"
-# exception as provided by Oracle in the GPL Version 2 section of the License
-# file that accompanied this code.
-#
-# Modifications:
-# If applicable, add the following below the License Header, with the fields
-# enclosed by brackets [] replaced by your own identifying information:
-# "Portions Copyright [year] [name of copyright owner]"
-#
-# Contributor(s):
-# If you wish your version of this file to be governed by only the CDDL or
-# only the GPL Version 2, indicate your decision by adding "[Contributor]
-# elects to include this software in this distribution under the [CDDL or GPL
-# Version 2] license."  If you don't indicate a single choice of license, a
-# recipient has the option to distribute your version of this file under
-# either the CDDL, the GPL Version 2 or to extend the choice of license to
-# its licensees as provided above.  However, if you add GPL Version 2 code
-# and therefore, elected the GPL Version 2 license, then the option applies
-# only if the new code is made subject to such option by the copyright
-# holder.
-#
-# Portions Copyright [2016-2021] [Payara Foundation and/or its affiliates]
-
-#GlassFish logging.properties list
-#Update June 13 2012
-
-#All attributes details
-handlers=java.util.logging.ConsoleHandler
-handlerServices=com.sun.enterprise.server.logging.GFFileHandler,com.sun.enterprise.server.logging.SyslogHandler
-java.util.logging.ConsoleHandler.formatter=com.sun.enterprise.server.logging.UniformLogFormatter
-java.util.logging.FileHandler.count=1
-java.util.logging.FileHandler.formatter=java.util.logging.XMLFormatter
-java.util.logging.FileHandler.limit=50000
-java.util.logging.FileHandler.pattern=%h/java%u.log
-com.sun.enterprise.server.logging.GFFileHandler.compressOnRotation=false
-com.sun.enterprise.server.logging.GFFileHandler.excludeFields=
-com.sun.enterprise.server.logging.GFFileHandler.file=${com.sun.aas.instanceRoot}/logs/server.log
-com.sun.enterprise.server.logging.GFFileHandler.flushFrequency=1
-com.sun.enterprise.server.logging.GFFileHandler.formatter=com.sun.enterprise.server.logging.ODLLogFormatter
-com.sun.enterprise.server.logging.GFFileHandler.level=ALL
-com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true
-com.sun.enterprise.server.logging.GFFileHandler.logtoConsole=false
-com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true
-com.sun.enterprise.server.logging.GFFileHandler.maxHistoryFiles=0
-com.sun.enterprise.server.logging.GFFileHandler.multiLineMode=true
-com.sun.enterprise.server.logging.GFFileHandler.retainErrorsStasticsForHours=0
-com.sun.enterprise.server.logging.GFFileHandler.rotationLimitInBytes=2000000
-com.sun.enterprise.server.logging.GFFileHandler.rotationOnDateChange=false
-com.sun.enterprise.server.logging.GFFileHandler.rotationTimelimitInMinutes=0
-com.sun.enterprise.server.logging.SyslogHandler.level=ALL
-com.sun.enterprise.server.logging.SyslogHandler.useSystemLogging=false
-log4j.logger.org.hibernate.validator.util.Version=warn
-com.sun.enterprise.server.logging.UniformLogFormatter.ansiColor=true
-
-#Payara Notification logging properties
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.compressOnRotation=false
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.file=${com.sun.aas.instanceRoot}/logs/notification.log
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.formatter=com.sun.enterprise.server.logging.ODLLogFormatter
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.logtoFile=true
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.maxHistoryFiles=0
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationLimitInBytes=2000000
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationOnDateChange=false
-fish.payara.enterprise.server.logging.PayaraNotificationFileHandler.rotationTimelimitInMinutes=0
-fish.payara.deprecated.jsonlogformatter.underscoreprefix=false
-
-#All log level details
-
-.level=INFO
-ShoalLogger.level=CONFIG
-com.hazelcast.level=WARNING
-java.util.logging.ConsoleHandler.level=FINEST
-javax.enterprise.resource.corba.level=INFO
-javax.enterprise.resource.javamail.level=INFO
-javax.enterprise.resource.jdo.level=INFO
-javax.enterprise.resource.jms.level=INFO
-javax.enterprise.resource.jta.level=INFO
-javax.enterprise.resource.resourceadapter.level=INFO
-javax.enterprise.resource.sqltrace.level=FINE
-javax.enterprise.resource.webcontainer.jsf.application.level=INFO
-javax.enterprise.resource.webcontainer.jsf.config.level=INFO
-javax.enterprise.resource.webcontainer.jsf.context.level=INFO
-javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO
-javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO
-javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO
-javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO
-javax.enterprise.resource.webcontainer.jsf.resource.level=INFO
-javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO
-javax.enterprise.resource.webcontainer.jsf.timing.level=INFO
-javax.enterprise.system.container.cmp.level=INFO
-javax.enterprise.system.container.ejb.level=INFO
-javax.enterprise.system.container.ejb.mdb.level=INFO
-javax.enterprise.system.container.web.level=INFO
-javax.enterprise.system.core.classloading.level=INFO
-javax.enterprise.system.core.config.level=INFO
-javax.enterprise.system.core.level=INFO
-javax.enterprise.system.core.security.level=INFO
-javax.enterprise.system.core.selfmanagement.level=INFO
-javax.enterprise.system.core.transaction.level=INFO
-javax.enterprise.system.level=INFO
-javax.enterprise.system.ssl.security.level=INFO
-javax.enterprise.system.tools.admin.level=INFO
-javax.enterprise.system.tools.backup.level=INFO
-javax.enterprise.system.tools.deployment.common.level=WARNING
-javax.enterprise.system.tools.deployment.dol.level=WARNING
-javax.enterprise.system.tools.deployment.level=INFO
-javax.enterprise.system.util.level=INFO
-javax.enterprise.system.webservices.registry.level=INFO
-javax.enterprise.system.webservices.rpc.level=INFO
-javax.enterprise.system.webservices.saaj.level=INFO
-javax.level=INFO
-javax.mail.level=INFO
-javax.org.glassfish.persistence.level=INFO
-org.apache.catalina.level=INFO
-org.apache.coyote.level=INFO
-org.apache.jasper.level=INFO
-org.eclipse.persistence.session.level=INFO
-org.glassfish.admingui.level=INFO
-org.glassfish.naming.level=INFO
-org.jvnet.hk2.osgiadapter.level=INFO
-
-javax.enterprise.resource.corba.level=INFO
-javax.enterprise.resource.jta.level=INFO
-javax.enterprise.system.webservices.saaj.level=INFO
-javax.enterprise.system.container.ejb.level=INFO
-javax.enterprise.system.container.ejb.mdb.level=INFO
-javax.enterprise.resource.javamail.level=INFO
-javax.enterprise.system.webservices.rpc.level=INFO
-javax.enterprise.system.container.web.level=INFO
-javax.enterprise.resource.jms.level=INFO
-javax.enterprise.system.webservices.registry.level=INFO
-javax.enterprise.resource.webcontainer.jsf.application.level=INFO
-javax.enterprise.resource.webcontainer.jsf.resource.level=INFO
-javax.enterprise.resource.webcontainer.jsf.config.level=INFO
-javax.enterprise.resource.webcontainer.jsf.context.level=INFO
-javax.enterprise.resource.webcontainer.jsf.facelets.level=INFO
-javax.enterprise.resource.webcontainer.jsf.lifecycle.level=INFO
-javax.enterprise.resource.webcontainer.jsf.managedbean.level=INFO
-javax.enterprise.resource.webcontainer.jsf.renderkit.level=INFO
-javax.enterprise.resource.webcontainer.jsf.taglib.level=INFO
-javax.enterprise.resource.webcontainer.jsf.timing.level=INFO
-javax.org.glassfish.persistence.level=INFO
-javax.enterprise.system.tools.backup.level=INFO
-javax.mail.level=INFO
-org.glassfish.admingui.level=INFO
-org.glassfish.naming.level=INFO
-org.eclipse.persistence.session.level=INFO
-javax.enterprise.system.tools.deployment.dol.level=WARNING
-javax.enterprise.system.tools.deployment.common.level=WARNING
diff --git a/doc/sphinx-guides/source/_static/navbarscroll.js b/doc/sphinx-guides/source/_static/navbarscroll.js
index 66c9d4d7995..735f80870cd 100644
--- a/doc/sphinx-guides/source/_static/navbarscroll.js
+++ b/doc/sphinx-guides/source/_static/navbarscroll.js
@@ -1,6 +1,6 @@
 /*  
     Use to fix hidden section headers behind the navbar when using links with targets
-    See: http://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header    
+    See: https://stackoverflow.com/questions/10732690/offsetting-an-html-anchor-to-adjust-for-fixed-header    
 */
 $jqTheme(document).ready(function() {
   $jqTheme('a[href*="#"]:not([href="#"])').on('click', function() {
diff --git a/doc/sphinx-guides/source/_static/util/clear_timer.sh b/doc/sphinx-guides/source/_static/util/clear_timer.sh
index 1d9966e4e07..641b2695084 100755
--- a/doc/sphinx-guides/source/_static/util/clear_timer.sh
+++ b/doc/sphinx-guides/source/_static/util/clear_timer.sh
@@ -8,7 +8,7 @@
 # if you'd like to avoid that.
 
 # directory where Payara is installed
-PAYARA_DIR=/usr/local/payara5
+PAYARA_DIR=/usr/local/payara6
 
 # directory within Payara (defaults)
 DV_DIR=${PAYARA_DIR}/glassfish/domains/domain1
diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh
index a12439d9cf8..674972b18f2 100644
--- a/doc/sphinx-guides/source/_static/util/counter_daily.sh
+++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh
@@ -1,7 +1,7 @@
 #! /bin/bash
 
 COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04"
-MDC_LOG_DIRECTORY="/usr/local/payara5/glassfish/domains/domain1/logs/mdc"
+MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc"
 
 # counter_daily.sh
 
diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html
index 538cccf74d7..c7b81dcb937 100644
--- a/doc/sphinx-guides/source/_templates/navbar.html
+++ b/doc/sphinx-guides/source/_templates/navbar.html
@@ -15,7 +15,7 @@
                 <span class="icon-bar"></span>
                 <span class="icon-bar"></span>
             </button>
-            <a class="navbar-brand" href="http://dataverse.org" id="dataverse-org-homepage-url">Dataverse Project</a>
+            <a class="navbar-brand" href="https://dataverse.org" id="dataverse-org-homepage-url">Dataverse Project</a>
         </div>
         <!-- Collect the nav links, forms, and other content for toggling -->
 
@@ -24,15 +24,15 @@
                 <li class="dropdown">
                     <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">About <span class="caret"></span></a>
                     <ul class="dropdown-menu">
-                        <li><a target="_blank" href="http://dataverse.org/about">About the Project</a></li>
-                        <li><a target="_blank" href="http://dataverse.org/add-data">Add Data</a></li>
-                        <li><a target="_blank" href="http://dataverse.org/blog">Blog</a></li>
-                        <li><a target="_blank" href="http://dataverse.org/presentations">Presentations</a></li>
-                        <li><a target="_blank" href="http://dataverse.org/publications">Publications</a></li>
+                        <li><a target="_blank" href="https://dataverse.org/about">About the Project</a></li>
+                        <li><a target="_blank" href="https://dataverse.org/add-data">Add Data</a></li>
+                        <li><a target="_blank" href="https://dataverse.org/blog">Blog</a></li>
+                        <li><a target="_blank" href="https://dataverse.org/presentations">Presentations</a></li>
+                        <li><a target="_blank" href="https://dataverse.org/publications">Publications</a></li>
                     </ul>
                 </li>
                 <li>
-                    <a target="_blank" href="http://dataverse.org/">
+                    <a target="_blank" href="https://dataverse.org/">
                         Community
                     </a>
                 </li>
@@ -49,18 +49,18 @@
                 <li>
                     <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">Software <span class="caret"></span></a>
                     <ul class="dropdown-menu">
-                        <li><a target="_blank" href="http://dataverse.org/software-features">Features</a></li>
-                        <li><a target="_blank" href="http://github.com/IQSS/dataverse">Source Code</a></li>
-                        <li><a target="_blank" href="http://guides.dataverse.org/en/latest/user/index.html">User Guide</a></li>
-                        <li><a target="_blank" href="http://guides.dataverse.org/en/latest/installation/index.html">Installation Guide</a></li>
-                        <li><a target="_blank" href="http://guides.dataverse.org/en/latest/api/index.html">API Guide</a></li>
-                        <li><a target="_blank" href="http://guides.dataverse.org/en/latest/developers/index.html">Developer Guide</a></li>
-                        <li><a target="_blank" href="http://guides.dataverse.org/en/latest/style/index.html">Style Guide</a></li>
-                        <li><a target="_blank" href="http://guides.dataverse.org/en/latest/admin/index.html">Admin Guide</a></li>
+                        <li><a target="_blank" href="https://dataverse.org/software-features">Features</a></li>
+                        <li><a target="_blank" href="https://github.com/IQSS/dataverse">Source Code</a></li>
+                        <li><a target="_blank" href="https://guides.dataverse.org/en/latest/user/index.html">User Guide</a></li>
+                        <li><a target="_blank" href="https://guides.dataverse.org/en/latest/installation/index.html">Installation Guide</a></li>
+                        <li><a target="_blank" href="https://guides.dataverse.org/en/latest/api/index.html">API Guide</a></li>
+                        <li><a target="_blank" href="https://guides.dataverse.org/en/latest/developers/index.html">Developer Guide</a></li>
+                        <li><a target="_blank" href="https://guides.dataverse.org/en/latest/style/index.html">Style Guide</a></li>
+                        <li><a target="_blank" href="https://guides.dataverse.org/en/latest/admin/index.html">Admin Guide</a></li>
                     </ul>
                 </li>
                 <li>
-                    <a target="_blank" href="http://dataverse.org/">
+                    <a target="_blank" href="https://dataverse.org/">
                         Contact
                     </a>
                 </li>
diff --git a/doc/sphinx-guides/source/admin/collectionquotas.rst b/doc/sphinx-guides/source/admin/collectionquotas.rst
new file mode 100644
index 00000000000..2ce3132e2ba
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/collectionquotas.rst
@@ -0,0 +1,19 @@
+
+Storage Quotas for Collections
+==============================
+
+Please note that this is a new and still experimental feature (as of Dataverse v6.1 release).
+
+Instance admins can now define storage quota limits for specific collections. These limits can be set, changed and/or deleted via the provided APIs (please see the :ref:`collection-storage-quotas` section of the :doc:`/api/native-api` guide). The Read version of the API is available to the individual collection admins (i.e., a collection owner can check on the quota configured for their collection), but only superusers can set, change or disable storage quotas.
+
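+As an illustrative sketch only (the authoritative endpoint paths, HTTP methods, and parameters are in the :ref:`collection-storage-quotas` section linked above), the superuser calls take roughly the following shape, with ``$ALIAS`` being the collection alias and sizes given in bytes::
+
+    export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+    export SERVER_URL=https://demo.dataverse.org
+
+    # check the quota currently configured for a collection
+    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ALIAS/storage/quota"
+
+    # set or change the quota (confirm the exact HTTP method in the API Guide)
+    curl -X POST -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ALIAS/storage/quota/1000000000"
+
+    # disable the quota
+    curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ALIAS/storage/quota"
+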
+Storage quotas are *inherited* by subcollections. In other words, when a storage use limit is set for a specific collection, it applies to all the datasets immediately under it and in its sub-collections, unless different quotas are defined there, and so on. Each file added to any dataset in that hierarchy counts for the purposes of the quota limit defined for the top collection. A storage quota defined on a child sub-collection overrides whatever quota may be defined on the parent or inherited from an ancestor.
+
+For example, a collection ``A`` has the storage quota set to 10GB. It has 3 sub-collections, ``B``, ``C`` and ``D``. Users can keep uploading files into the datasets anywhere in this hierarchy until the combined size of 10GB is reached between them. However, if an admin has reason to limit one of the sub-collections, ``B``, to 3GB only, that quota can be explicitly set there. This both limits the growth of ``B`` to 3GB and *guarantees* that allocation to it. That is, the contributors to collection ``B`` will be able to keep adding data until the 3GB limit is reached, even after the parent collection ``A`` reaches the combined 10GB limit (at which point ``A`` and all its subcollections except for ``B`` will become read-only).
+
+We do not yet know whether this will be a popular or needed use case - a child collection quota that differs from the one it inherits from a parent. It is likely that for many installations it will be sufficient to define quotas for collections and have them apply to all the child objects underneath. We will examine the response to this feature and consider making adjustments to this scheme based on it. We are already considering introducing other types of quotas, such as limits by user or by specific storage volume.
+
+Please note that only the sizes of the main datafiles and of the archival tab-delimited versions produced by the ingest process are counted for the purposes of enforcing the limits. Automatically generated "auxiliary" files, such as rescaled image thumbnails and metadata exports for datasets, are not.
+
+When quotas are set and enforced, users will be informed of the remaining storage allocation on the file upload page, together with other upload and processing limits.
+
+Part of the new and experimental nature of this feature is that we do not yet know how well it will function in real life on a very busy production system, despite our best efforts to test it prior to the release. One specific concern is having to update the recorded storage use for every parent collection of a given dataset whenever new files are added. This includes the combined size of the root, top-level collection, which will need to be updated after *every* file upload. In the unlikely case that this starts causing problems with race conditions and database update conflicts, it is possible to disable these updates (and thus disable the storage quotas feature) by setting the :ref:`dataverse.storageuse.disable-storageuse-increments` JVM setting to true.
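+
+Should disabling ever become necessary, the flag is a regular JVM option; one way to set it (a sketch, assuming a standard Payara installation and that you are in the Payara ``bin`` directory) is::
+
+    ./asadmin create-jvm-options "-Ddataverse.storageuse.disable-storageuse-increments=true"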
diff --git a/doc/sphinx-guides/source/admin/dataverses-datasets.rst b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
index 7f32e8c2514..37494c57fa1 100644
--- a/doc/sphinx-guides/source/admin/dataverses-datasets.rst
+++ b/doc/sphinx-guides/source/admin/dataverses-datasets.rst
@@ -53,11 +53,15 @@ Configure a Dataverse Collection to Store All New Files in a Specific File Store
 To direct new files (uploaded when datasets are created or edited) for all datasets in a given Dataverse collection, the store can be specified via the API as shown below, or by editing the 'General Information' for a Dataverse collection on the Dataverse collection page. Only accessible to superusers. ::
  
     curl -H "X-Dataverse-key: $API_TOKEN" -X PUT -d $storageDriverLabel http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver
+
+(Note that for ``dataverse.files.store1.label=MyLabel``, you should pass ``MyLabel``.)
     
 The current driver can be seen using::
 
     curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver
 
+(Note that for ``dataverse.files.store1.label=MyLabel``, ``store1`` will be returned.)
+
 and can be reset to the default store with::
 
     curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE http://$SERVER/api/admin/dataverse/$dataverse-alias/storageDriver
@@ -118,6 +122,28 @@ Creates a link between a dataset and a Dataverse collection (see the :ref:`datas
 
     curl -H "X-Dataverse-key: $API_TOKEN" -X PUT http://$SERVER/api/datasets/$linked-dataset-id/link/$linking-dataverse-alias
 
+List Collections that are Linked from a Dataset
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Lists the link(s) created between a dataset and a Dataverse collection (see the :ref:`dataset-linking` section of the User Guide for more information). ::
+
+    curl -H "X-Dataverse-key: $API_TOKEN" http://$SERVER/api/datasets/$linked-dataset-id/links
+
+It returns a list in the following format:
+
+.. code-block:: json
+
+  {
+    "status": "OK",
+    "data": {
+      "dataverses that link to dataset id 56782": [
+        "crc990 (id 18802)"
+      ]
+    }
+  }
+
+.. _unlink-a-dataset:
+
 Unlink a Dataset
 ^^^^^^^^^^^^^^^^
 
@@ -131,15 +157,35 @@ Mint a PID for a File That Does Not Have One
 In the following example, the database id of the file is 42::
 
     export FILE_ID=42
-    curl http://localhost:8080/api/admin/$FILE_ID/registerDataFile
+    curl "http://localhost:8080/api/admin/$FILE_ID/registerDataFile"
+    
+This method will return a FORBIDDEN response if minting of file PIDs is not enabled for the collection the file is in. (Note that it is possible to have file PIDs enabled for a specific collection, even when it is disabled for the Dataverse installation as a whole. See :ref:`collection-attributes-api` in the Native API Guide.)
 
-Mint PIDs for Files That Do Not Have Them
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Mint PIDs for all unregistered published files in the specified collection
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-If you have a large number of files, you might want to consider miniting PIDs for files individually using the ``registerDataFile`` endpoint above in a for loop, sleeping between each registration::
+The following API will register the PIDs for all the yet unregistered published files in the datasets **directly within the collection** specified by its alias::
+
+    curl "http://localhost:8080/api/admin/registerDataFiles/{collection_alias}"
+
+It will not attempt to register the datafiles in its sub-collections, so this call will need to be repeated on any sub-collections where files need to be registered as well.
+File-level PID registration must be enabled on the collection. (Note that it is possible to have it enabled for a specific collection, even when it is disabled for the Dataverse installation as a whole. See :ref:`collection-attributes-api` in the Native API Guide.)
+
+This API will sleep for 1 second between registration calls by default. A longer sleep interval can be specified with an optional ``sleep=`` parameter::
+
+      curl "http://localhost:8080/api/admin/registerDataFiles/{collection_alias}?sleep=5"
+
+Mint PIDs for ALL unregistered files in the database
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The following API will attempt to register the PIDs for all the published files in your instance, in collections that allow file PIDs, that do not yet have them::
 
     curl http://localhost:8080/api/admin/registerDataFileAll
 
+The application will attempt to sleep for 1 second between registration attempts so as not to overload your persistent identifier service provider. Note that if you have a large number of files that need to be registered in your Dataverse installation, you may want to consider minting file PIDs within individual collections, or even for individual files using the ``registerDataFiles`` and/or ``registerDataFile`` endpoints above in a loop, with a longer sleep interval between calls.
+
 Mint a New DOI for a Dataset with a Handle
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
diff --git a/doc/sphinx-guides/source/admin/discoverability.rst b/doc/sphinx-guides/source/admin/discoverability.rst
new file mode 100644
index 00000000000..767bb55bce6
--- /dev/null
+++ b/doc/sphinx-guides/source/admin/discoverability.rst
@@ -0,0 +1,76 @@
+Discoverability
+===============
+
+Datasets are made discoverable by a variety of methods.
+
+.. contents:: |toctitle|
+  :local:
+
+DataCite Integration
+--------------------
+
+If you are using `DataCite <https://datacite.org>`_ as your DOI provider, when datasets are published, metadata is pushed to DataCite, where it can be searched. For more information, see :ref:`:DoiProvider` in the Installation Guide.
+
+OAI-PMH (Harvesting)
+--------------------
+
+The Dataverse software supports a protocol called OAI-PMH that facilitates harvesting dataset metadata from one system into another. For details on harvesting, see the :doc:`harvestserver` section.
+
+Machine-Readable Metadata on Dataset Landing Pages
+--------------------------------------------------
+
+As recommended in `A Data Citation Roadmap for Scholarly Data Repositories <https://doi.org/10.1101/097196>`_, the Dataverse software embeds metadata on dataset landing pages in a variety of machine-readable ways. 
+
+Dublin Core HTML Meta Tags
+++++++++++++++++++++++++++
+
+The HTML source of a dataset landing page includes "DC" (Dublin Core) ``<meta>`` tags such as the following::
+
+        <meta name="DC.identifier" content="..."
+        <meta name="DC.type" content="Dataset"
+        <meta name="DC.title" content="..."
+
+Schema.org JSON-LD Metadata
++++++++++++++++++++++++++++
+
+The HTML source of a dataset landing page includes Schema.org JSON-LD metadata like this::
+
+
+        <script type="application/ld+json">{"@context":"http://schema.org","@type":"Dataset","@id":"https://doi.org/...
+
+
+.. _discovery-sign-posting:
+
+Signposting
++++++++++++
+
+The Dataverse software supports `Signposting <https://signposting.org>`_. This allows machines to request more information about a dataset through the `Link <https://tools.ietf.org/html/rfc5988>`_ HTTP header.
+
+There are 2 Signposting profile levels, level 1 and level 2. In this implementation:
+ * Level 1 links are shown `as recommended <https://signposting.org/FAIR/>`_ in the "Link"
+   HTTP header, which can be fetched by sending an HTTP HEAD request, e.g. ``curl -I https://demo.dataverse.org/dataset.xhtml?persistentId=doi:10.5072/FK2/KPY4ZC``.
+   The number of author and file links in the level 1 header can be configured as described below. 
+ * The level 2 linkset can be fetched by visiting the dedicated linkset page for 
+   that artifact. The link can be seen in level 1 links with key name ``rel="linkset"``.
+
+Note: Authors without an author link will not be counted or shown in any profile/linkset.
+The following configuration options are available:
+
+- :ref:`dataverse.signposting.level1-author-limit`
+
+  Sets the max number of authors to be shown in `level 1` profile.
+  If the number of authors (with identifier URLs) exceeds this value, no author links will be shown in `level 1` profile.
+  The default is 5.
+
+- :ref:`dataverse.signposting.level1-item-limit`
+
+  Sets the max number of items/files which will be shown in `level 1` profile. Datasets with
+  too many files will not show any file links in `level 1` profile. They will be shown in `level 2` linkset only. 
+  The default is 5.
+
+See also :ref:`signposting-api` in the API Guide.
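+
+As a quick way to see what a given installation exposes, you can inspect the level 1 links directly from the HTTP response headers; ``$SERVER_URL`` and ``$PERSISTENT_ID`` below are placeholders for your installation and a published dataset::
+
+    curl -s -I "$SERVER_URL/dataset.xhtml?persistentId=$PERSISTENT_ID" | grep -i '^link:'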
+
+Additional Discoverability Through Integrations
+-----------------------------------------------
+
+See :ref:`integrations-discovery` in the Integrations section for additional discovery methods you can enable.
diff --git a/doc/sphinx-guides/source/admin/external-tools.rst b/doc/sphinx-guides/source/admin/external-tools.rst
index ad6181a867a..346ca0b15ee 100644
--- a/doc/sphinx-guides/source/admin/external-tools.rst
+++ b/doc/sphinx-guides/source/admin/external-tools.rst
@@ -92,7 +92,15 @@ File Level Preview Tools
 
 File level preview tools allow the user to see a preview of the file contents without having to download it.
 
-When a file has a preview available, a preview icon will appear next to that file in the file listing on the dataset page. On the file page itself, the preview will appear in a Preview tab either immediately or once a guestbook has been filled in or terms, if any, have been agreed to.
+When a file has a preview available, a preview icon will appear next to that file in the file listing on the dataset page. On the file page itself, the preview will appear in a Preview tab (renamed File Tools, if multiple tools are available) either immediately or once a guestbook has been filled in or terms, if any, have been agreed to.
+
+File Level Query Tools
+++++++++++++++++++++++++
+
+File level query tools allow the user to ask questions (e.g. natural language queries) of a data table's contents without having to download it.
+
+When a file has a query tool available, a query icon will appear next to that file in the file listing on the dataset page. On the file page itself, the query tool will appear in a Query tab (renamed File Tools, if multiple tools are available) either immediately or once a guestbook has been filled in or terms, if any, have been agreed to.
+
 
 File Level Configure Tools
 ++++++++++++++++++++++++++
@@ -107,7 +115,7 @@ Dataset level explore tools allow the user to explore all the files in a dataset
 Dataset Level Configure Tools
 +++++++++++++++++++++++++++++
 
-Configure tools at the dataset level are not currently supported.
+Dataset level configure tools can be launched by users who have edit access to the dataset. These tools are found under the "Edit Dataset" menu.
 
 Writing Your Own External Tool
 ------------------------------
diff --git a/doc/sphinx-guides/source/admin/harvestclients.rst b/doc/sphinx-guides/source/admin/harvestclients.rst
index 02783e4b97a..59fc4dc2c64 100644
--- a/doc/sphinx-guides/source/admin/harvestclients.rst
+++ b/doc/sphinx-guides/source/admin/harvestclients.rst
@@ -21,8 +21,11 @@ Clients are managed on the "Harvesting Clients" page accessible via the :doc:`da
 
 The process of creating a new, or editing an existing client, is largely self-explanatory. It is split into logical steps, in a way that allows the user to go back and correct the entries made earlier. The process is interactive and guidance text is provided. For example, the user is required to enter the URL of the remote OAI server. When they click *Next*, the application will try to establish a connection to the server in order to verify that it is working, and to obtain the information about the sets of metadata records and the metadata formats it supports. The choices offered to the user on the next page will be based on this extra information. If the application fails to establish a connection to the remote archive at the address specified, or if an invalid response is received, the user is given an opportunity to check and correct the URL they entered.
 
+Please note that in some rare cases this GUI may fail to create a client because of unexpected errors during these real-time exchanges with an OAI server that is otherwise known to be valid. For example, in the past we have had issues with servers offering very long lists of sets (*really* long, in the thousands). To allow an admin to still create a client in a situation like that, we provide a REST API that will do so without attempting any validation in real time. This obviously makes it the responsibility of the admin to supply values that are known to be valid - a working OAI URL, the name of a set that does exist on the server, and/or a supported metadata format. See the :ref:`managing-harvesting-clients-api` section of the :doc:`/api/native-api` guide for more information.
+
 Note that as of 5.13, a new entry "Custom HTTP Header" has been added to the Step 1. of Create or Edit form. This optional field can be used to configure this client with a specific HTTP header to be added to every OAI request. This is to accommodate a (rare) use case where the remote server may require a special token of some kind in order to offer some content not available to other clients. Most OAI servers offer the same publicly-available content to all clients, so few admins will have a use for this feature. It is however on the very first, Step 1. screen in case the OAI server requires this token even for the "ListSets" and "ListMetadataFormats" requests, which need to be sent in the Step 2. of creating or editing a client. Multiple headers can be supplied separated by `\\n` - actual "backslash" and "n" characters, not a single "new line" character. 
 
+
 How to Stop a Harvesting Run in Progress
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -32,8 +35,8 @@ For example:
 
 .. code-block:: bash
 
-  sudo touch /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
-  sudo chown dataverse /usr/local/payara5/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
+  sudo touch /usr/local/payara6/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
+  sudo chown dataverse /usr/local/payara6/glassfish/domains/domain1/logs/stopharvest_bigarchive.70916
 
 Note: If the application server is stopped and restarted, any running harvesting jobs will be killed but may remain marked as in progress in the database. We thus recommend using the mechanism here to stop ongoing harvests prior to a server restart.
 
@@ -41,6 +44,6 @@ Note: If the application server is stopped and restarted, any running harvesting
 What if a Run Fails?
 ~~~~~~~~~~~~~~~~~~~~
 
-Each harvesting client run logs a separate file per run to the app server's default logging directory (``/usr/local/payara5/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format  ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong.
+Each harvesting client run logs a separate file per run to the app server's default logging directory (``/usr/local/payara6/glassfish/domains/domain1/logs/`` unless you've changed it). Look for filenames in the format  ``harvest_TARGET_YYYY_MM_DD_timestamp.log`` to get a better idea of what's going wrong.
 
 Note that you'll want to run a minimum of Dataverse Software 4.6, optimally 4.18 or beyond, for the best OAI-PMH interoperability.
diff --git a/doc/sphinx-guides/source/admin/harvestserver.rst b/doc/sphinx-guides/source/admin/harvestserver.rst
index 6f4f23fc587..773e048aa76 100644
--- a/doc/sphinx-guides/source/admin/harvestserver.rst
+++ b/doc/sphinx-guides/source/admin/harvestserver.rst
@@ -18,7 +18,7 @@ If you want to learn more about OAI-PMH, you could take a look at
 or the `OAI-PMH protocol definition <https://www.openarchives.org/OAI/openarchivesprotocol.html>`_.
 
 You might consider adding your OAI-enabled Dataverse installation to
-`this shared list <https://docs.google.com/spreadsheets/d/12cxymvXCqP_kCsLKXQD32go79HBWZ1vU_tdG4kvP5S8/>`_
+`this shared list <https://docs.google.com/spreadsheets/d/1bfsw7gnHlHerLXuk7YprUT68liHfcaMxs1rFciA-mEo/>`_
 of such instances.
 
 The email portion of :ref:`systemEmail` will be visible via OAI-PMH (from the "Identify" verb).
diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst
index b97d9161d50..633842044b4 100755
--- a/doc/sphinx-guides/source/admin/index.rst
+++ b/doc/sphinx-guides/source/admin/index.rst
@@ -14,6 +14,7 @@ This guide documents the functionality only available to superusers (such as "da
 
    dashboard
    external-tools
+   discoverability
    harvestclients
    harvestserver
    metadatacustomization
@@ -26,6 +27,7 @@ This guide documents the functionality only available to superusers (such as "da
    solr-search-index
    ip-groups
    mail-groups
+   collectionquotas
    monitoring
    reporting-tools-and-queries
    maintenance
diff --git a/doc/sphinx-guides/source/admin/integrations.rst b/doc/sphinx-guides/source/admin/integrations.rst
index 1888fd89761..db566106b49 100644
--- a/doc/sphinx-guides/source/admin/integrations.rst
+++ b/doc/sphinx-guides/source/admin/integrations.rst
@@ -14,10 +14,14 @@ A variety of integrations are oriented toward making it easier for your research
 GitHub
 ++++++
 
-Dataverse integration with GitHub is implemented via a Dataverse Uploader GitHub Action. It is a reusable, composite workflow for uploading a git repository or subdirectory into a dataset on a target Dataverse installation. The action is customizable, allowing users to choose to replace a dataset, add to the dataset, publish it or leave it as a draft version on Dataverse. The action provides some metadata to the dataset, such as the origin GitHub repository, and it preserves the directory tree structure. 
+GitHub can be integrated with a Dataverse installation in multiple ways.
+
+One Dataverse integration is implemented via a Dataverse Uploader GitHub Action. It is a reusable, composite workflow for uploading a git repository or subdirectory into a dataset on a target Dataverse installation. The action is customizable, allowing users to choose to replace a dataset, add to the dataset, publish it or leave it as a draft version in the Dataverse installation. The action provides some metadata to the dataset, such as the origin GitHub repository, and it preserves the directory tree structure. 
 
 For instructions on using Dataverse Uploader GitHub Action, visit https://github.com/marketplace/actions/dataverse-uploader-action
 
+In addition to the Dataverse Uploader GitHub Action, the :ref:`integrations-dashboard` also enables a pull of data from GitHub to a dataset.
+
 Dropbox
 +++++++
 
@@ -28,7 +32,11 @@ Open Science Framework (OSF)
 
 The Center for Open Science's Open Science Framework (OSF) is an open source software project that facilitates open collaboration in science research across the lifespan of a scientific project.
 
-For instructions on depositing data from OSF to your Dataverse installation, your researchers can visit https://help.osf.io/hc/en-us/articles/360019737314-Connect-Dataverse-to-a-Project
+OSF can be integrated with a Dataverse installation in multiple ways.
+
+Researchers can configure OSF itself to deposit to your Dataverse installation by following `instructions from OSF <https://help.osf.io/article/208-connect-dataverse-to-a-project>`_.
+
+In addition to the method mentioned above, the :ref:`integrations-dashboard` also enables a pull of data from OSF to a dataset.
 
 RSpace
 ++++++
@@ -77,6 +85,53 @@ SampleDB is a web-based electronic lab notebook (ELN) with a focus on flexible m
 
 For instructions on using the Dataverse export, you can visit https://scientific-it-systems.iffgit.fz-juelich.de/SampleDB/administrator_guide/dataverse_export.html
 
+RedCap
+++++++
+
+RedCap is a web-based application for capturing data for clinical research and creating databases and projects.
+
+The :ref:`integrations-dashboard` enables a pull of data from RedCap to a dataset in Dataverse.
+
+GitLab
+++++++
+
+GitLab is an open source Git repository platform that provides free public and private repositories, issue-tracking capabilities, and wikis for collaborative software development.
+
+The :ref:`integrations-dashboard` enables a pull of data from GitLab to a dataset in Dataverse.
+
+iRODS
++++++
+
+iRODS is an open source, metadata-driven data management system that is accessible through a host of different clients.
+
+The :ref:`integrations-dashboard` enables a pull of data from iRODS to a dataset in Dataverse.
+
+.. _integrations-dashboard:
+
+Integrations Dashboard
+++++++++++++++++++++++
+
+The integrations dashboard is software developed by the Dataverse community to enable easy data transfer from an existing data management platform to a dataset in a Dataverse collection.
+
+Instead of trying to set up Dataverse plug-ins in existing tools and systems to push data to a Dataverse installation, the dashboard works in reverse by being a portal to pull data from tools such as iRODS and GitHub into a dataset.
+
+Its aim is to make integrations more flexible and less dependent on the cooperation of the systems being integrated with. You can use it either to create a dataset from scratch and add metadata after files have been transferred, or to compare what is already in an existing dataset and make updating files in datasets easier.
+
+The dashboard is designed to be adjustable to a Dataverse installation's needs and easy to connect to other systems as well.
+
+The integrations dashboard is currently in development. A preview and more information can be found at: `rdm-integration GitHub repository <https://github.com/libis/rdm-integration>`_
+
+Globus
+++++++
+
+Globus transfer uses an efficient transfer mechanism and has additional features that make it suitable for large files and large numbers of files:
+
+* robust file transfer capable of restarting after network or endpoint failures
+* third-party transfer, which enables a user accessing a Dataverse installation in their desktop browser to initiate transfer of their files from a remote endpoint (e.g. on a local high-performance computing cluster) directly to an S3 store managed by the Dataverse installation
+
+Users can transfer files via `Globus <https://www.globus.org>`_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use one or more Globus-accessible stores
+and the community-developed `dataverse-globus <https://github.com/scholarsportal/dataverse-globus>`_ app has been properly installed and configured.
+
 
 Embedding Data on Websites
 --------------------------
@@ -104,6 +159,8 @@ Compute Button
 
 The "Compute" button is still highly experimental and has special requirements such as use of a Swift object store, but it is documented under "Setting up Compute" in the :doc:`/installation/config` section of the Installation Guide.
 
+.. _wholetale:
+
 Whole Tale
 ++++++++++
 
@@ -111,14 +168,18 @@ Whole Tale
 `import data from a Dataverse installation
 <https://wholetale.readthedocs.io/en/stable/users_guide/manage.html>`_ via identifier (e.g., DOI, URI, etc) or through the External Tools integration.  For installation instructions, see the :doc:`external-tools` section or the `Integration <https://wholetale.readthedocs.io/en/stable/users_guide/integration.html#dataverse-external-tools>`_ section of the Whole Tale User Guide.
 
+.. _binder:
+
 Binder
 ++++++
 
-Researchers can launch Jupyter Notebooks, RStudio, and other computational environments by entering the DOI of a dataset in a Dataverse installation on https://mybinder.org
+Researchers can launch Jupyter Notebooks, RStudio, and other computational environments by entering the DOI of a dataset in a Dataverse installation at https://mybinder.org
 
-A Binder button can also be added to every dataset page to launch Binder from there. See :doc:`external-tools`.
+A Binder button can also be added to every dataset page to launch Binder from there. Instructions on enabling this feature can be found under :doc:`external-tools`.
 
-Institutions can self host BinderHub. The Dataverse Project is one of the supported `repository providers <https://binderhub.readthedocs.io/en/latest/developer/repoproviders.html#supported-repoproviders>`_.
+Additionally, institutions can self host `BinderHub <https://binderhub.readthedocs.io/en/latest/>`_ (the software that powers mybinder.org), which lists the Dataverse software as one of the supported `repository providers <https://binderhub.readthedocs.io/en/latest/developer/repoproviders.html#supported-repoproviders>`_.
+
+.. _renku:
 
 Renku
 +++++
@@ -136,15 +197,12 @@ Avgidea Data Search
 
 Researchers can use a Google Sheets add-on to search for Dataverse installation's CSV data and then import that data into a sheet. See `Avgidea Data Search <https://www.avgidea.io/avgidea-data-platform.html>`_ for details.
 
+.. _integrations-discovery:
+
 Discoverability
 ---------------
 
-Integration with `DataCite <https://datacite.org>`_ is built in to the Dataverse Software. When datasets are published, metadata is sent to DataCite. You can further increase the discoverability of your datasets by setting up additional integrations.
-
-OAI-PMH (Harvesting)
-++++++++++++++++++++
-
-The Dataverse Software supports a protocol called OAI-PMH that facilitates harvesting datasets from one system into another. For details on harvesting, see the :doc:`harvestserver` section.
+A number of built-in features related to data discovery are listed under :doc:`discoverability`, but you can further increase the discoverability of your data by setting up integrations.
 
 SHARE
 +++++
@@ -171,7 +229,14 @@ Sponsored by the `Ontario Council of University Libraries (OCUL) <https://ocul.o
 RDA BagIt (BagPack) Archiving
 +++++++++++++++++++++++++++++
 
-A Dataverse installation can be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant <https://www.rd-alliance.org/system/files/Research%20Data%20Repository%20Interoperability%20WG%20-%20Final%20Recommendations_reviewed_0.pdf>`_ zipped `BagIt <https://tools.ietf.org/html/draft-kunze-bagit-17>`_ bags to the `Chronopolis <https://libraries.ucsd.edu/chronopolis/>`_ via `DuraCloud <https://duraspace.org/duracloud/>`_, to a local file system, or to `Google Cloud Storage <https://cloud.google.com/storage>`_.
+A Dataverse installation can be configured to submit a copy of published Dataset versions, packaged as `Research Data Alliance conformant <https://www.rd-alliance.org/system/files/Research%20Data%20Repository%20Interoperability%20WG%20-%20Final%20Recommendations_reviewed_0.pdf>`_ zipped `BagIt <https://tools.ietf.org/html/draft-kunze-bagit-17>`_ bags to `Chronopolis <https://libraries.ucsd.edu/chronopolis/>`_ via `DuraCloud <https://duraspace.org/duracloud/>`_, to a local file system, to any S3 store, or to `Google Cloud Storage <https://cloud.google.com/storage>`_.
+Submission can be automated to occur upon publication, or can be done periodically (via external scripting).
+The archival status of each Dataset version can be seen in the Dataset page version table and queried via API.
+
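+A sketch of checking that status via the API (the ``archivalStatus`` endpoint path shown here is an assumption based on recent Dataverse versions and requires a superuser API token; the dataset id and version number are placeholders):
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+
+  # Ask for the archival status of version 1.0 of dataset 42
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/42/1.0/archivalStatus"
+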
+The archival Bags include all of the files and metadata in a given dataset version and are sufficient to recreate the dataset, e.g. in a new Dataverse instance, or potentially in another RDA-conformant repository.
+Specifically, the archival Bags include an OAI-ORE Map serialized as JSON-LD that describes the dataset and its files, as well as information about the version of Dataverse used to export the archival Bag.
+
+The `DVUploader <https://github.com/GlobalDataverseCommunityConsortium/dataverse-uploader>`_ includes functionality to recreate a Dataset from an archival Bag produced by Dataverse (using the Dataverse API to do so).
 
 For details on how to configure this integration, see :ref:`BagIt Export` in the :doc:`/installation/config` section of the Installation Guide.
 
diff --git a/doc/sphinx-guides/source/admin/make-data-count.rst b/doc/sphinx-guides/source/admin/make-data-count.rst
index 8a96e949ff9..fe32af6649a 100644
--- a/doc/sphinx-guides/source/admin/make-data-count.rst
+++ b/doc/sphinx-guides/source/admin/make-data-count.rst
@@ -72,7 +72,8 @@ Enable or Disable Display of Make Data Count Metrics
 
 By default, when MDC logging is enabled (when ``:MDCLogPath`` is set), your Dataverse installation will display MDC metrics instead of it's internal (legacy) metrics. You can avoid this (e.g. to collect MDC metrics for some period of time before starting to display them) by setting ``:DisplayMDCMetrics`` to false.
 
-The following discussion assumes ``:MDCLogPath`` has been set to ``/usr/local/payara5/glassfish/domains/domain1/logs/mdc``
+The following discussion assumes ``:MDCLogPath`` has been set to ``/usr/local/payara6/glassfish/domains/domain1/logs/mdc``.
+
+You can also decide to display MDC metrics along with Dataverse's traditional download counts from the time before MDC was enabled. To do this, set :ref:`:MDCStartDate` to the date when you started MDC logging.
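+
+For example, the setting could be put in place with the admin settings API (a sketch; the date value is a placeholder for when your MDC logging actually began):
+
+.. code-block:: bash
+
+  curl -X PUT -d "2019-10-01" http://localhost:8080/api/admin/settings/:MDCStartDate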
 
 Configure Counter Processor
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -102,7 +103,7 @@ Soon we will be setting up a cron job to run nightly but we start with a single
 
 * If you are running Counter Processor for the first time in the middle of a month, you will need create blank log files for the previous days. e.g.:
 
-  * ``cd /usr/local/payara5/glassfish/domains/domain1/logs/mdc``
+  * ``cd /usr/local/payara6/glassfish/domains/domain1/logs/mdc``
 
   * ``touch counter_2019-02-01.log``
   
@@ -146,7 +147,9 @@ Configuring Your Dataverse Installation for Make Data Count Citations
 
 Please note: as explained in the note above about limitations, this feature is not available to Dataverse installations that use Handles.
 
-To configure your Dataverse installation to pull citations from the test vs. production DataCite server see :ref:`doi.dataciterestapiurlstring` in the Installation Guide.
+To configure your Dataverse installation to pull citations from the test vs.
+production DataCite server, see :ref:`dataverse.pid.datacite.rest-api-url` in
+the Installation Guide.
 
 Please note that in the curl example, Bash environment variables are used with the idea that you can set a few environment variables and copy and paste the examples as is. For example, "$DOI" could become "doi:10.5072/FK2/BL2IBM" by issuing the following export command from Bash:
 
diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst
index 9fb8626d4c4..4f737bd730b 100644
--- a/doc/sphinx-guides/source/admin/metadatacustomization.rst
+++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst
@@ -95,6 +95,11 @@ Each of the three main sections own sets of properties:
 | displayName    | Acts as a brief label for display related to this       | Should be relatively brief. The limit is 256 character, |
 |                | #metadataBlock.                                         | but very long names might cause display problems.       |
 +----------------+---------------------------------------------------------+---------------------------------------------------------+
+| displayFacet   | Label displayed in the search area when this            | Should be brief. Long names will cause display problems |
+|                | #metadataBlock is configured as a search facet          | in the search area.                                     |
+|                | for a collection. See                                   |                                                         |
+|                | :ref:`the API <metadata-block-facet-api>`.              |                                                         |
++----------------+---------------------------------------------------------+---------------------------------------------------------+
 | blockURI       | Associates the properties in a block with an external   | The citation #metadataBlock has the blockURI            |
 |                | URI.                                                    | https://dataverse.org/schema/citation/ which assigns a  |
 |                | Properties will be assigned the                         | default global URI to terms such as                     |
@@ -408,13 +413,10 @@ Setting Up a Dev Environment for Testing
 
 You have several options for setting up a dev environment for testing metadata block changes:
 
-- Vagrant: See the :doc:`/developers/tools` section of the Developer Guide.
-- docker-aio: See https://github.com/IQSS/dataverse/tree/develop/conf/docker-aio
+- Docker: See :doc:`/container/index`.
 - AWS deployment: See the :doc:`/developers/deployment` section of the Developer Guide.
 - Full dev environment: See the :doc:`/developers/dev-environment` section of the Developer Guide.
 
-To get a clean environment in Vagrant, you'll be running ``vagrant destroy``. In Docker, you'll use ``docker rm``. For a full dev environment or AWS installation, you might find ``rebuild`` and related scripts at ``scripts/deploy/phoenix.dataverse.org`` useful.
-
 Editing TSV files
 ~~~~~~~~~~~~~~~~~
 
@@ -452,12 +454,16 @@ metadatablock.name=(the value of **name** property from #metadatablock)
 
 metadatablock.displayName=(the value of **displayName** property from #metadatablock)
 
+metadatablock.displayFacet=(the value of **displayFacet** property from #metadatablock)
+
 example:
 
 metadatablock.name=citation
 
 metadatablock.displayName=Citation Metadata
 
+metadatablock.displayFacet=Citation
+
 #datasetField (field) properties
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 datasetfieldtype.(the value of **name** property from #datasetField).title=(the value of **title** property from #datasetField)
@@ -494,6 +500,8 @@ Running a curl command like "load" example above should make the new custom meta
 
 ``curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" -d "[\"journal\",\"geospatial\"]" http://localhost:8080/api/dataverses/:root/metadatablocks``
 
+.. _update-solr-schema:
+
 Updating the Solr Schema
 ~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -505,7 +513,7 @@ the Solr schema configuration, including any enabled metadata schemas:
 
 ``curl "http://localhost:8080/api/admin/index/solr/schema"``
 
-You can use :download:`update-fields.sh <../../../../conf/solr/8.11.1/update-fields.sh>` to easily add these to the
+You can use :download:`update-fields.sh <../../../../conf/solr/9.3.0/update-fields.sh>` to easily add these to the
 Solr schema you installed for your Dataverse installation.
 
 The script needs a target XML file containing your Solr schema. (See the :doc:`/installation/prerequisites/` section of
@@ -529,7 +537,7 @@ from some place else than your Dataverse installation).
 Please note that reconfigurations of your Solr index might require a re-index. Usually release notes indicate
 a necessary re-index, but for your custom metadata you will need to keep track on your own.
 
-Please note also that if you are going to make a pull request updating ``conf/solr/8.11.1/schema.xml`` with fields you have
+Please note also that if you are going to make a pull request updating ``conf/solr/9.3.0/schema.xml`` with fields you have
 added, you should first load all the custom metadata blocks in ``scripts/api/data/metadatablocks`` (including ones you
 don't care about) to create a complete list of fields. (This might change in the future.)
 
@@ -577,6 +585,58 @@ The scripts required can be hosted locally or retrieved dynamically from https:/
 
 Please note that in addition to the :ref:`:CVocConf` described above, an alternative is the :ref:`:ControlledVocabularyCustomJavaScript` setting.
 
+Protecting MetadataBlocks
+-------------------------
+
+Dataverse can be configured to only allow entries for a metadata block to be changed (created, edited, deleted) by entities that know a defined secret key. 
+Metadata blocks protected by such a key are referred to as "System" metadata blocks. 
+A primary use case for system metadata blocks is to handle metadata created by third-party tools interacting with Dataverse where unintended changes to the metadata could cause a failure. Examples might include archiving systems or workflow engines.
+To protect an existing metadata block, one must set a key (recommended to be long and unguessable) for that block:
+
+``dataverse.metadata.block-system-metadata-keys.<block name>=<key value>``
+
+This can be done using system properties (see :ref:`jvm-options`), environment variables, or other MicroProfile Config mechanisms supported by the app server.
+See the `Payara docs for supported sources <https://docs.payara.fish/community/docs/documentation/microprofile/config/README.html#config-sources>`_. Note that a Payara restart may be required to enable the new option.
+
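+A minimal sketch of setting such a key as a JVM system property with ``asadmin`` (the block name ``codeMeta20`` and the key value are placeholders; for production use, a password alias as shown below is preferred):
+
+.. code-block:: shell
+
+   # Hypothetical example: protect the codeMeta20 block with a key passed as a system property
+   asadmin create-jvm-options "-Ddataverse.metadata.block-system-metadata-keys.codeMeta20=1234ChangeMeToSomethingLong"
+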
+For these secret keys, Payara password aliases are recommended.
+
+   Alias creation example using the codemeta metadata block (actual name: codeMeta20):
+
+   .. code-block:: shell
+
+      echo "AS_ADMIN_ALIASPASSWORD=1234ChangeMeToSomethingLong" > /tmp/key.txt
+      asadmin create-password-alias --passwordfile /tmp/key.txt dataverse.metadata.block-system-metadata-keys.codeMeta20
+      rm /tmp/key.txt
+      
+   Alias deletion example for the codemeta metadata block (removes protected status):
+   
+   .. code-block:: shell
+
+      asadmin delete-password-alias dataverse.metadata.block-system-metadata-keys.codeMeta20
+
+A Payara restart is required after these example commands.
+
+When protected via a key, a metadata block will not be shown in the user interface when a dataset is being created or when metadata is being edited. Entries in such a system metadata block will be shown to users, consistent with Dataverse's design in which all metadata in published datasets is publicly visible.
+
+Note that protecting a block with required fields, or using a template with an entry in a protected block, will make it impossible to create a new dataset via the user interface. Also note that for this reason protecting the citation metadata block is not recommended. (Creating a dataset also automatically sets the date of deposit field in the citation block, which would be prohibited if the citation block is protected.)
+
+To remove protected status and return a block to working normally, remove the associated key.
+
+To add metadata to a system metadata block via API, one must include an additional key of the form 
+
+``mdkey.<blockName>=<key value>``
+
+as an HTTP header or query parameter (case sensitive) for each system metadata block in any API call in which metadata values are changed in that block. Multiple keys are allowed if more than one system metadata block is being changed in a given API call.
+
+For example, following the :ref:`Add Dataset Metadata <add-semantic-metadata>` example from the :doc:`/developers/dataset-semantic-metadata-api`:
+
+.. code-block:: bash
+
+  curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -H 'mdkey.codeMeta20:1234ChangeMeToSomethingLong' -d '{"codeVersion": "1.0.0", "@context":{"codeVersion": "https://schema.org/softwareVersion"}}' "$SERVER_URL/api/datasets/$DATASET_ID/metadata"
+  
+  curl -X PUT -H X-Dataverse-key:$API_TOKEN -H 'Content-Type: application/ld+json' -d '{"codeVersion": "1.0.1", "@context":{"codeVersion": "https://schema.org/softwareVersion"}}' "$SERVER_URL/api/datasets/$DATASET_ID/metadata?mdkey.codeMeta20=1234ChangeMeToSomethingLong&replace=true"
+    
+
 Tips from the Dataverse Community
 ---------------------------------
 
diff --git a/doc/sphinx-guides/source/admin/monitoring.rst b/doc/sphinx-guides/source/admin/monitoring.rst
index a4affda1302..04fba23a3e8 100644
--- a/doc/sphinx-guides/source/admin/monitoring.rst
+++ b/doc/sphinx-guides/source/admin/monitoring.rst
@@ -1,7 +1,7 @@
 Monitoring
 ===========
 
-Once you're in production, you'll want to set up some monitoring. This page may serve as a starting point for you but you are encouraged to share your ideas with the Dataverse community!
+Once you're in production, you'll want to set up some monitoring. This page may serve as a starting point for you but you are encouraged to share your ideas with the Dataverse community! You may also be interested in the :doc:`/developers/performance` section of the Developer Guide.
 
 .. contents:: Contents:
 	:local:
@@ -14,7 +14,7 @@ In production you'll want to monitor the usual suspects such as CPU, memory, fre
 Munin
 +++++
 
-http://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps.
+https://munin-monitoring.org says, "A default installation provides a lot of graphs with almost no work." From RHEL or CentOS 7, you can try the following steps.
 
 Enable the EPEL yum repo (if you haven't already):
 
diff --git a/doc/sphinx-guides/source/admin/troubleshooting.rst b/doc/sphinx-guides/source/admin/troubleshooting.rst
index 9f085ba90cd..acbdcaae17e 100644
--- a/doc/sphinx-guides/source/admin/troubleshooting.rst
+++ b/doc/sphinx-guides/source/admin/troubleshooting.rst
@@ -53,15 +53,13 @@ Long-Running Ingest Jobs Have Exhausted System Resources
 
 Ingest is both CPU- and memory-intensive, and depending on your system resources and the size and format of tabular data files uploaded, may render your Dataverse installation unresponsive or nearly inoperable. It is possible to cancel these jobs by purging the ingest queue.
 
-``/usr/local/payara5/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username.
+``/usr/local/payara6/mq/bin/imqcmd -u admin query dst -t q -n DataverseIngest`` will query the DataverseIngest destination. The password, unless you have changed it, matches the username.
 
-``/usr/local/payara5/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation.
+``/usr/local/payara6/mq/bin/imqcmd -u admin purge dst -t q -n DataverseIngest`` will purge the DataverseIngest queue, and prompt for your confirmation.
 
 Finally, list destinations to verify that the purge was successful:
 
-``/usr/local/payara5/mq/bin/imqcmd -u admin list dst``
-
-If you are still running Glassfish, substitute glassfish4 for payara5 above. If you have installed your Dataverse installation in some other location, adjust the above paths accordingly.
+``/usr/local/payara6/mq/bin/imqcmd -u admin list dst``
 
 .. _troubleshooting-payara:
 
@@ -73,7 +71,7 @@ Payara
 Finding the Payara Log File
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-``/usr/local/payara5/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Payara in the default directory). Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of the Dataverse Software you have installed.
+``/usr/local/payara6/glassfish/domains/domain1/logs/server.log`` is the main place to look when you encounter problems (assuming you installed Payara in the default directory). Hopefully an error message has been logged. If there's a stack trace, it may be of interest to developers, especially if they can trace line numbers back to a tagged version or commit. Send more of the stack trace (the entire file if possible) to developers who can help (see "Getting Help", below) and be sure to say which version of the Dataverse Software you have installed.
 
 .. _increase-payara-logging:
 
diff --git a/doc/sphinx-guides/source/api/apps.rst b/doc/sphinx-guides/source/api/apps.rst
index 5573056051c..44db666736c 100755
--- a/doc/sphinx-guides/source/api/apps.rst
+++ b/doc/sphinx-guides/source/api/apps.rst
@@ -94,6 +94,13 @@ This series of Python scripts offers a starting point for migrating datasets fro
 
 https://github.com/scholarsportal/dataverse-migration-scripts
 
+idsc.dataverse
+~~~~~~~~~~~~~~
+
+This module can, among other things, help you migrate one Dataverse installation to another (see `migrate.md <https://github.com/iza-institute-of-labor-economics/idsc.dataverse/blob/main/migrate.md>`_).
+
+https://github.com/iza-institute-of-labor-economics/idsc.dataverse
+
 Java
 ----
 
@@ -113,6 +120,16 @@ Dataverse Software on Android makes use of a Dataverse installation's Search API
 
 https://github.com/IQSS/dataverse-android
 
+Go
+--
+
+Integrations Dashboard
+~~~~~~~~~~~~~~~~~~~~~~
+
+The integrations dashboard is software developed by the Dataverse community to enable easy data transfer from an existing data management platform to a dataset in a Dataverse collection. See :ref:`integrations-dashboard` for details.
+
+https://github.com/libis/rdm-integration
+
 PHP
 ---
 
diff --git a/doc/sphinx-guides/source/api/auth.rst b/doc/sphinx-guides/source/api/auth.rst
index a10de14de5a..eae3bd3c969 100644
--- a/doc/sphinx-guides/source/api/auth.rst
+++ b/doc/sphinx-guides/source/api/auth.rst
@@ -63,3 +63,25 @@ Resetting Your API Token
 ------------------------
 
 You can reset your API Token from your account page in your Dataverse installation as described in the :doc:`/user/account` section of the User Guide.
+
+.. _bearer-tokens:
+
+Bearer Tokens
+-------------
+
+Bearer tokens are defined in `RFC 6750`_ and can be used as an alternative to API tokens if your installation has been set up to use them (see :ref:`bearer-token-auth` in the Installation Guide).
+
+.. _RFC 6750: https://tools.ietf.org/html/rfc6750
+
+To test if bearer tokens are working, you can try something like the following (using the :ref:`User Information` API endpoint), substituting in parameters for your installation and user.
+
+.. code-block:: bash
+
+  export TOKEN=`curl -s -X POST --location "http://keycloak.mydomain.com:8090/realms/test/protocol/openid-connect/token" -H "Content-Type: application/x-www-form-urlencoded" -d "username=user&password=user&grant_type=password&client_id=test&client_secret=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8" | jq '.access_token' -r | tr -d "\n"`
+  
+  curl -H "Authorization: Bearer $TOKEN" http://localhost:8080/api/users/:me
+
+Signed URLs
+-----------
+
+See :ref:`signed-urls`.
diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst
new file mode 100644
index 00000000000..20225b99b5c
--- /dev/null
+++ b/doc/sphinx-guides/source/api/changelog.rst
@@ -0,0 +1,18 @@
+API Changelog (Breaking Changes)
+================================
+
+This API changelog is experimental and we would love feedback on its usefulness. Its primary purpose is to inform API developers of any breaking changes. (We try not to ship any backward-incompatible changes, but it happens.) To see a list of new APIs and backward-compatible changes to existing APIs, please see each version's release notes at https://github.com/IQSS/dataverse/releases
+
+.. contents:: |toctitle|
+    :local:
+    :depth: 1
+
+v6.1
+----
+
+- The metadata field "Alternative Title" now supports multiple values so you must pass an array rather than a string when populating that field via API. See https://github.com/IQSS/dataverse/pull/9440
+
+v6.0
+----
+
+- **/api/access/datafile**: When a null or invalid API token is provided to download a public (non-restricted) file with this API call, it will result in a ``401`` error response. Previously, the download was allowed (``200`` response). Please note that we noticed this change sometime between 5.9 and 6.0. If you can help us pinpoint the exact version (or commit!), please get in touch. See :doc:`dataaccess`.
diff --git a/doc/sphinx-guides/source/api/client-libraries.rst b/doc/sphinx-guides/source/api/client-libraries.rst
index bf9f658808b..bd0aa55ba99 100755
--- a/doc/sphinx-guides/source/api/client-libraries.rst
+++ b/doc/sphinx-guides/source/api/client-libraries.rst
@@ -15,12 +15,16 @@ https://github.com/aeonSolutions/OpenScience-Dataverse-API-C-library is the offi
 
 This C/C++ library was created and is currently maintained by `Miguel T. <https://www.linkedin.com/in/migueltomas/>`_ To learn how to install and use it, see the project's `wiki page <https://github.com/aeonSolutions/OpenScience-Dataverse-API-C-library/wiki>`_.
 
+Go
+--
+https://github.com/libis/rdm-dataverse-go-api is a Go API library that can be used in your project by simply adding ``github.com/libis/rdm-dataverse-go-api`` as a dependency in your ``go.mod`` file. See the GitHub page for more details and usage examples.
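+
+A minimal sketch of adding it to a project (assuming Go tooling is installed; the module path is taken from the repository above):
+
+.. code-block:: bash
+
+  # Downloads the library and records it as a dependency in go.mod
+  go get github.com/libis/rdm-dataverse-go-api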
+
 Java
 ----
 
 https://github.com/IQSS/dataverse-client-java is the official Java library for Dataverse APIs.
 
-`Richard Adams <http://www.researchspace.com/electronic-lab-notebook/about_us_team.html>`_ from `ResearchSpace <http://www.researchspace.com>`_ created and maintains this library.
+`Richard Adams <https://www.researchspace.com/electronic-lab-notebook/about_us_team.html>`_ from `ResearchSpace <https://www.researchspace.com>`_ created and maintains this library.
 
 Javascript
 ----------
@@ -44,20 +48,29 @@ There is no official PHP library for Dataverse APIs (please :ref:`get in touch <
 Python
 ------
 
-There are two Python modules for interacting with Dataverse APIs.
+There are multiple Python modules for interacting with Dataverse APIs.
+
+`EasyDataverse <https://github.com/gdcc/easyDataverse>`_ is a Python library designed to simplify the management of Dataverse datasets in an object-oriented way, giving users the ability to upload, download, and update datasets with ease. By utilizing metadata block configurations, EasyDataverse automatically generates Python objects that contain all the necessary details required to create the native Dataverse JSON format used to create or edit datasets. Adding files and directories is also possible with EasyDataverse and requires no additional API calls. This library is particularly well-suited for client applications such as workflows and scripts as it minimizes technical complexities and facilitates swift development.
+
+`python-dvuploader <https://github.com/gdcc/python-dvuploader>`_ implements Jim Myers' excellent `dv-uploader <https://github.com/GlobalDataverseCommunityConsortium/dataverse-uploader>`_ as a Python module. It offers parallel direct uploads to Dataverse backend storage, streams files directly instead of buffering them in memory, and supports multi-part uploads, chunking data accordingly.
+
+`pyDataverse <https://github.com/gdcc/pyDataverse>`_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger <https://stefankasberger.at>`_ from `AUSSDA - The Austrian Social Science Data Archive <https://aussda.at>`_.  
 
-`pyDataverse <https://github.com/gdcc/pyDataverse>`_ primarily allows developers to manage Dataverse collections, datasets and datafiles. Its intention is to help with data migrations and DevOps activities such as testing and configuration management. The module is developed by `Stefan Kasberger <http://stefankasberger.at>`_ from `AUSSDA - The Austrian Social Science Data Archive <https://aussda.at>`_.  
+`UBC's Dataverse Utilities <https://ubc-library-rc.github.io/dataverse_utils/>`_ are a set of Python console utilities which allow one to upload datasets from a tab-separated-value spreadsheet, bulk release multiple datasets, bulk delete unpublished datasets, quickly duplicate records, replace licenses, and more. For additional information, see their `PyPI page <https://pypi.org/project/dataverse-utils/>`_.
 
 `dataverse-client-python <https://github.com/IQSS/dataverse-client-python>`_ had its initial release in 2015. `Robert Liebowitz <https://github.com/rliebz>`_ created this library while at the `Center for Open Science (COS) <https://centerforopenscience.org>`_ and the COS uses it to integrate the `Open Science Framework (OSF) <https://osf.io>`_ with Dataverse installations via an add-on which itself is open source and listed on the :doc:`/api/apps` page.
 
+`Pooch <https://github.com/fatiando/pooch>`_ is a Python library that allows library and application developers to download data. Among other features, it takes care of various protocols, caching in OS-specific locations, and checksum verification, and adds optional features like progress bars or log messages. Among other popular repositories, Pooch supports Dataverse: you can reference Dataverse-hosted datasets by just a DOI, and Pooch will determine the data repository type, query the Dataverse API for the contained files and checksums, and give you an easy interface to download them.
+
+`idsc.dataverse <https://github.com/iza-institute-of-labor-economics/idsc.dataverse>`_ reads the metadata and files of datasets from a Dataverse installation (e.g. dataverse.example1.com) and writes them into ``~/.idsc/dataverse/api/dataverse.example1.com``, organized in directories of the form ``PID_type/prefix/suffix``, where ``PID_type`` is one of ``hdl``, ``doi``, or ``ark``. It can then "export" the local copy from ``~/.idsc/dataverse/api/dataverse.example1.com`` to ``~/.idsc/.cache/dataverse.example2.com`` so that the datasets can be uploaded to dataverse.example2.com.
+
 R
 -
 
 https://github.com/IQSS/dataverse-client-r is the official R package for Dataverse APIs. The latest release can be installed from `CRAN <https://cran.r-project.org/package=dataverse>`_. 
 The R client can search and download datasets. It is useful when automatically (instead of manually) downloading data files as part of a script. For bulk edit and upload operations, we currently recommend pyDataverse.
 
-The package is currently maintained by  `Shiro Kuriwaki <https://github.com/kuriwaki>`_. It was originally created by `Thomas Leeper <http://thomasleeper.com>`_ and then formerly maintained by `Will Beasley <https://github.com/wibeasley>`_.
-
+The package is currently maintained by  `Shiro Kuriwaki <https://github.com/kuriwaki>`_. It was originally created by `Thomas Leeper <https://thomasleeper.com>`_ and then formerly maintained by `Will Beasley <https://github.com/wibeasley>`_.
 
 Ruby
 ----
diff --git a/doc/sphinx-guides/source/api/curation-labels.rst b/doc/sphinx-guides/source/api/curation-labels.rst
index 36950a37eb3..0675eeec398 100644
--- a/doc/sphinx-guides/source/api/curation-labels.rst
+++ b/doc/sphinx-guides/source/api/curation-labels.rst
@@ -93,3 +93,22 @@ To get the list of allowed curation labels allowed for a given Dataset
     curl -H X-Dataverse-key:$API_TOKEN "$SERVER_URL/api/datasets/:persistentId/allowedCurationLabels?persistentId=$DATASET_PID"
 
 You should expect a 200 ("OK") response with a comma-separated list of allowed labels contained in a JSON 'data' object.
+
+
+Get a Report on the Curation Status of All Datasets
+---------------------------------------------------
+
+To get a CSV file listing the curation label assigned to each Dataset with a draft version, along with the creation and last modification dates and a list of those with permission to publish the version, use the following API call.
+
+This API call is restricted to superusers.
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+
+  # Get the report
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/listCurationStates"
+
+You should expect a 200 ("OK") response with the report in CSV format.
diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index e76ea167587..f7aaa8f4ee4 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -83,7 +83,7 @@ Basic access URI:
 
 ``/api/access/datafile/$id``
 
-.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``.
+.. note:: Files can be accessed using persistent identifiers. This is done by passing the constant ``:persistentId`` where the numeric id of the file is expected, and then passing the actual persistent id as a query parameter with the name ``persistentId``. However, this file access method only works when the FilePIDsEnabled option is enabled, which an admin can configure. For further information, refer to :ref:`:FilePIDsEnabled`.
 
   Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB* ::
 
@@ -403,3 +403,32 @@ This method returns a list of Authenticated Users who have requested access to t
 A curl example using an ``id``::
 
     curl -H "X-Dataverse-key:$API_TOKEN" -X GET http://$SERVER/api/access/datafile/{id}/listRequests
+
+User Has Requested Access to a File:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``/api/access/datafile/{id}/userFileAccessRequested``
+
+This method returns true or false depending on whether or not the calling user has requested access to a particular file.
+
+A curl example using an ``id``::
+
+    curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userFileAccessRequested"
+
+
+Get User Permissions on a File:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+``/api/access/datafile/{id}/userPermissions``
+
+This method returns the permissions that the calling user has on a particular file.
+
+In particular, the user permissions that this method checks, returned as booleans, are the following:
+
+* Can download the file
+* Can manage the file permissions
+* Can edit the file owner dataset
+
+A curl example using an ``id``::
+
+    curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userPermissions"
diff --git a/doc/sphinx-guides/source/api/external-tools.rst b/doc/sphinx-guides/source/api/external-tools.rst
index eec9944338f..ae0e44b36aa 100644
--- a/doc/sphinx-guides/source/api/external-tools.rst
+++ b/doc/sphinx-guides/source/api/external-tools.rst
@@ -11,7 +11,7 @@ Introduction
 
 External tools are additional applications the user can access or open from your Dataverse installation to preview, explore, and manipulate data files and datasets. The term "external" is used to indicate that the tool is not part of the main Dataverse Software.
 
-Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org
+Once you have created the external tool itself (which is most of the work!), you need to teach a Dataverse installation how to construct URLs that your tool needs to operate. For example, if you've deployed your tool to fabulousfiletool.com your tool might want the ID of a file and the siteUrl of the Dataverse installation like this: https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org
 
 In short, you will be creating a manifest in JSON format that describes not only how to construct URLs for your tool, but also what types of files your tool operates on, where it should appear in the Dataverse installation web interfaces, etc. 
 
@@ -39,8 +39,8 @@ How External Tools Are Presented to Users
 
 An external tool can appear in your Dataverse installation in a variety of ways:
 
-- as an explore, preview, or configure option for a file
-- as an explore option for a dataset
+- as an explore, preview, query or configure option for a file
+- as an explore or configure option for a dataset
 - as an embedded preview on the file landing page
 
 See also the :ref:`testing-external-tools` section of the Admin Guide for some perspective on how Dataverse installations will expect to test your tool before announcing it to their users.
@@ -88,11 +88,11 @@ Terminology
 
     displayName                  The **name** of the tool in the Dataverse installation web interface. For example, "Data Explorer".
 
-    description                  The **description** of the tool, which appears in a popup (for configure tools only) so the user who clicked the tool can learn about the tool before being redirected the tool in a new tab in their browser. HTML is supported.
+    description                  The **description** of the tool, which appears in a popup (for configure tools only) so the user who clicked the tool can learn about the tool before being redirected to the tool in a new tab in their browser. HTML is supported.
 
     scope                        Whether the external tool appears and operates at the **file** level or the **dataset** level. Note that a file level tool much also specify the type of file it operates on (see "contentType" below).
 
-    types                        Whether the external tool is an **explore** tool, a **preview** tool, a **configure** tool or any combination of these (multiple types are supported for a single tool). Configure tools require an API token because they make changes to data files (files within datasets). Configure tools are currently not supported at the dataset level. The older "type" keyword that allows you to pass a single type as a string is deprecated but still supported.
+    types                        Whether the external tool is an **explore** tool, a **preview** tool, a **query** tool, a **configure** tool or any combination of these (multiple types are supported for a single tool). Configure tools require an API token because they make changes to data files (files within datasets). The older "type" keyword that allows you to pass a single type as a string is deprecated but still supported.
 
     toolUrl                      The **base URL** of the tool before query parameters are added.
     
@@ -102,7 +102,7 @@ Terminology
     
     httpMethod                   Either ``GET`` or ``POST``.
 
-    queryParameters              **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=http://demo.dataverse.org``.
+    queryParameters              **Key/value combinations** that can be appended to the toolUrl. For example, once substitution takes place (described below) the user may be redirected to ``https://fabulousfiletool.com?fileId=42&siteUrl=https://demo.dataverse.org``.
 
     query parameter keys         An **arbitrary string** to associate with a value that is populated with a reserved word (described below). As the author of the tool, you have control over what "key" you would like to be passed to your tool. For example, if you want to have your tool receive and operate on the query parameter "dataverseFileId=42" instead of just "fileId=42", that's fine.
 
@@ -160,17 +160,25 @@ Authorization Options
 
 When called for datasets or data files that are not public (i.e. in a draft dataset or for a restricted file), external tools are allowed access via the user's credentials. This is accomplished by one of two mechanisms:
 
-* Signed URLs (more secure, recommended)
+.. _signed-urls:
 
-  - Configured via the ``allowedApiCalls`` section of the manifest. The tool will be provided with signed URLs allowing the specified access to the given dataset or datafile for the specified amount of time. The tool will not be able to access any other datasets or files the user may have access to and will not be able to make calls other than those specified.
-  - For tools invoked via a GET call, Dataverse will include a callback query parameter with a Base64 encoded value. The decoded value is a signed URL that can be called to retrieve a JSON response containing all of the queryParameters and allowedApiCalls specified in the manfiest.
-  - For tools invoked via POST, Dataverse will send a JSON body including the requested queryParameters and allowedApiCalls. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool.
+Signed URLs
+^^^^^^^^^^^
 
-* API Token (deprecated, less secure, not recommended)
+The signed URL mechanism is more secure than exposing API tokens and is therefore recommended.
 
-  - Configured via the ``queryParameters`` by including an ``{apiToken}`` value. When this is present Dataverse will send the user's apiToken to the tool. With the user's API token, the tool can perform any action via the Dataverse API that the user could. External tools configured via this method should be assessed for their trustworthiness.
-  - For tools invoked via GET, this will be done via a query parameter in the request URL which could be cached in the browser's history. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool.
-  - For tools invoked via POST, Dataverse will send a JSON body including the apiToken.
+- Configured via the ``allowedApiCalls`` section of the manifest. The tool will be provided with signed URLs allowing the specified access to the given dataset or datafile for the specified amount of time. The tool will not be able to access any other datasets or files the user may have access to and will not be able to make calls other than those specified.
+- For tools invoked via a GET call, Dataverse will include a callback query parameter with a Base64-encoded value. The decoded value is a signed URL that can be called to retrieve a JSON response containing all of the queryParameters and allowedApiCalls specified in the manifest (see the sketch below).
+- For tools invoked via POST, Dataverse will send a JSON body including the requested queryParameters and allowedApiCalls. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool.
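+
+A minimal sketch of how a GET-invoked tool might use that callback parameter (``$CALLBACK`` is a placeholder for the Base64 value your tool received):
+
+.. code-block:: bash
+
+  # Decode the callback value to obtain the signed URL, then fetch the JSON
+  # listing the queryParameters and allowedApiCalls for this invocation
+  SIGNED_URL=$(echo "$CALLBACK" | base64 --decode)
+  curl "$SIGNED_URL"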
+
+API Token
+^^^^^^^^^
+
+The API token mechanism is deprecated. Because it is less secure than signed URLs, it is not recommended for new external tools.
+
+- Configured via the ``queryParameters`` by including an ``{apiToken}`` value. When this is present Dataverse will send the user's apiToken to the tool. With the user's API token, the tool can perform any action via the Dataverse API that the user could. External tools configured via this method should be assessed for their trustworthiness.
+- For tools invoked via GET, this will be done via a query parameter in the request URL which could be cached in the browser's history. Dataverse expects the response to the POST to indicate a redirect which Dataverse will use to open the tool.
+- For tools invoked via POST, Dataverse will send a JSON body including the apiToken.
 
 Internationalization of Your External Tool
 ++++++++++++++++++++++++++++++++++++++++++
@@ -187,6 +195,7 @@ Using Example Manifests to Get Started
 ++++++++++++++++++++++++++++++++++++++
 
 Again, you can use :download:`fabulousFileTool.json <../_static/installation/files/root/external-tools/fabulousFileTool.json>` or :download:`dynamicDatasetTool.json <../_static/installation/files/root/external-tools/dynamicDatasetTool.json>` as a starting point for your own manifest file.
+Additional working examples, including ones using :ref:`signed-urls`, are available at https://github.com/gdcc/dataverse-previewers .
 
 Testing Your External Tool
 --------------------------
diff --git a/doc/sphinx-guides/source/api/getting-started.rst b/doc/sphinx-guides/source/api/getting-started.rst
index c465b726421..a50f12d1381 100644
--- a/doc/sphinx-guides/source/api/getting-started.rst
+++ b/doc/sphinx-guides/source/api/getting-started.rst
@@ -9,9 +9,9 @@ If you are a researcher or curator who wants to automate parts of your workflow,
 Servers You Can Test With
 -------------------------
 
-Rather than using a production Dataverse installation, API users are welcome to use http://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server.  
+Rather than using a production Dataverse installation, API users are welcome to use https://demo.dataverse.org for testing. You can email support@dataverse.org if you have any trouble with this server.  
 
-If you would rather have full control over your own test server, deployments to AWS, Docker, Vagrant, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`.
+If you would rather have full control over your own test server, deployments to AWS, Docker, and more are covered in the :doc:`/developers/index` and the :doc:`/installation/index`.
 
 Getting an API Token
 --------------------
@@ -52,6 +52,20 @@ If you ever want to check an environment variable, you can "echo" it like this:
 
   echo $SERVER_URL
 
+With curl version 7.56.0 and higher, it is recommended to use ``--form-string`` with outer quotes rather than the ``-F`` flag without outer quotes.
+
+For example, the curl parameter below might cause an error such as ``warning: garbage at end of field specification: ,"categories":["Data"]}``.
+
+.. code-block:: bash
+
+  -F jsonData={\"description\":\"My description.\",\"categories\":[\"Data\"]}
+
+Instead, use ``--form-string`` with outer quotes. See https://github.com/curl/curl/issues/2022
+
+.. code-block:: bash
+
+  --form-string 'jsonData={"description":"My description.","categories":["Data"]}'
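+
+A fuller sketch in context (the dataset PID and filename are placeholders, and the add-file endpoint used is the one described in :doc:`native-api`):
+
+.. code-block:: bash
+
+  export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID" \
+    -F "file=@data.tsv" \
+    --form-string 'jsonData={"description":"My description.","categories":["Data"]}'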
+
 If you don't like curl, don't have curl, or want to use a different programming language, you are encouraged to check out the Python, Javascript, R, and Java options in the :doc:`client-libraries` section.
 
 .. _curl: https://curl.haxx.se
diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst
index c9e79098546..dd195aa9d62 100755
--- a/doc/sphinx-guides/source/api/index.rst
+++ b/doc/sphinx-guides/source/api/index.rst
@@ -24,3 +24,4 @@ API Guide
    linkeddatanotification
    apps
    faq
+   changelog
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/api/intro.rst b/doc/sphinx-guides/source/api/intro.rst
index 933932cd7b9..8eb11798dd7 100755
--- a/doc/sphinx-guides/source/api/intro.rst
+++ b/doc/sphinx-guides/source/api/intro.rst
@@ -187,6 +187,10 @@ Lists of Dataverse APIs
   - Files
   - etc.
 
+- :doc:`/developers/dataset-semantic-metadata-api`: For creating, reading, editing, and deleting dataset metadata using JSON-LD.
+- :doc:`/developers/dataset-migration-api`: For migrating datasets from other repositories while retaining the original persistent identifiers and publication date.
+- :doc:`/developers/s3-direct-upload-api`: For the transfer of larger files/larger numbers of files directly to an S3 bucket managed by Dataverse. 
+- :doc:`/developers/globus-api`: For the Globus transfer of larger files/larger numbers of files directly via Globus endpoints managed by Dataverse or referencing files in remote endpoints. 
 - :doc:`metrics`: For query statistics about usage of a Dataverse installation.
 - :doc:`sword`: For depositing data using a standards-based approach rather than the :doc:`native-api`.
 
@@ -237,7 +241,7 @@ Dataverse Software API questions are on topic in all the usual places:
 
 - The dataverse-community Google Group: https://groups.google.com/forum/#!forum/dataverse-community
 - The Dataverse Project community calls: https://dataverse.org/community-calls
-- The Dataverse Project chat room: http://chat.dataverse.org 
+- The Dataverse Project chat room: https://chat.dataverse.org 
 - The Dataverse Project ticketing system: support@dataverse.org
 
 After your question has been answered, you are welcome to help improve the :doc:`faq` section of this guide.
diff --git a/doc/sphinx-guides/source/api/metrics.rst b/doc/sphinx-guides/source/api/metrics.rst
index f1eb1f88c71..613671e49d1 100755
--- a/doc/sphinx-guides/source/api/metrics.rst
+++ b/doc/sphinx-guides/source/api/metrics.rst
@@ -158,8 +158,15 @@ The following table lists the available metrics endpoints (not including the Mak
     /api/info/metrics/uniquedownloads,"pid, count",json,collection subtree,published,y,total count of unique users who have downloaded from the datasets in scope,The use case for this metric (uniquedownloads) is to more fairly assess which datasets are getting downloaded/used by only counting each users who downloads any file from a dataset as one count (versus downloads of multiple files or repeat downloads counting as multiple counts which adds a bias for large datasets and/or use patterns where a file is accessed repeatedly for new analyses)
     /api/info/metrics/uniquedownloads/monthly,"date, pid, count","json, csv",collection subtree,published,y,monthly cumulative timeseries of unique user counts for datasets in the dataverse scope,
     /api/info/metrics/uniquedownloads/toMonth/{yyyy-MM},"pid, count",json,collection subtree,published,y,cumulative count of unique users who have downloaded from the datasets in scope through specified month,
-    /api/info/metrics/filedownloads/monthly,"date, count, id, pid","json, csv",collection subtree,published,y,"monthly cumulative  timeseries by file id, pid from first date of first entry to now","unique downloads (as defined above) per month by file (id, pid) sorted in decreasing order of counts"
     /api/info/metrics/uniquefiledownloads,"count by id, pid","json, csv",collection subtree,published,y,as of now/totals,unique download counts per file id. PIDs are also included in output if they exist
+    /api/info/metrics/uniquefiledownloads/monthly,"date, count, id, pid","json, csv",collection subtree,published,y,"monthly cumulative  timeseries by file id, pid from first date of first entry to now","unique downloads per month by file (id, pid) sorted in decreasing order of counts"
     /api/info/metrics/uniquefiledownloads/toMonth/{yyyy-MM},"count by id, pid","json, csv",collection subtree,published,y,cumulative up to month specified,unique download counts per file id to the specified month. PIDs are also included in output if they exist
     /api/info/metrics/tree,"id, ownerId, alias, depth, name, children",json,collection subtree,published,y,"tree of dataverses starting at the root or a specified parentAlias with their id, owner id, alias, name, a computed depth, and array of children dataverses","underlying code can also include draft dataverses, this is not currently accessible via api, depth starts at 0"
     /api/info/metrics/tree/toMonth/{yyyy-MM},"id, ownerId, alias, depth, name, children",json,collection subtree,published,y,"tree of dataverses in existence as of specified date starting at the root or a specified parentAlias with their id, owner id, alias, name, a computed depth, and array of children dataverses","underlying code can also include draft dataverses, this is not currently accessible via api, depth starts at 0"
+
+Related API Endpoints
+---------------------
+
+The following endpoints are not under the metrics namespace but also return counts:
+
+- :ref:`file-download-count` 
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 3cd469e3883..56190dd342c 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -9,7 +9,7 @@ The Dataverse Software exposes most of its GUI functionality via a REST-based AP
 
 .. _CORS: https://www.w3.org/TR/cors/
 
-.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``http://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded.
+.. warning:: The Dataverse Software's API is versioned at the URI - all API calls may include the version number like so: ``https://server-address/api/v1/...``. Omitting the ``v1`` part would default to the latest API version (currently 1). When writing scripts/applications that will be used for a long time, make sure to specify the API version, so they don't break when the API is upgraded.
 
 .. contents:: |toctitle|
     :local:
@@ -56,13 +56,13 @@ Next you need to figure out the alias or database id of the "parent" Dataverse c
   export SERVER_URL=https://demo.dataverse.org
   export PARENT=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$PARENT --upload-file dataverse-complete.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT" --upload-file dataverse-complete.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root --upload-file dataverse-complete.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root" --upload-file dataverse-complete.json
 
 You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the newly-created Dataverse collection.
 
@@ -80,13 +80,13 @@ To view a published Dataverse collection:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl $SERVER_URL/api/dataverses/$ID
+  curl "$SERVER_URL/api/dataverses/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/dataverses/root
+  curl "https://demo.dataverse.org/api/dataverses/root"
 
 To view an unpublished Dataverse collection:
 
@@ -96,13 +96,13 @@ To view an unpublished Dataverse collection:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root"
 
 Delete a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -117,13 +117,13 @@ Deletes the Dataverse collection whose database ID or alias is given:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/dataverses/$ID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/dataverses/root
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root"
 
 .. _show-contents-of-a-dataverse-api:
 
@@ -140,13 +140,13 @@ Show Contents of a Dataverse Collection
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/contents
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/contents"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/contents
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/contents"
 
 Report the data (file) size of a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -159,13 +159,13 @@ Shows the combined size in bytes of all the files uploaded into the Dataverse co
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/storagesize
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storagesize"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/storagesize
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/storagesize"
 
 The size of published and unpublished files will be summed both in the Dataverse collection specified and beneath all its sub-collections, recursively. 
 By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by the Dataverse installation - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. 
@@ -181,13 +181,13 @@ All the roles defined directly in the Dataverse collection identified by ``id``:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/roles
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/roles"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/roles
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/roles"
 
 List Facets Configured for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -200,13 +200,13 @@ List Facets Configured for a Dataverse Collection
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/facets
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/facets"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/facets
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/facets"
 
 Set Facets for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -219,16 +219,18 @@ Assign search facets for a given Dataverse collection identified by ``id``:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/dataverses/$ID/facets --upload-file dataverse-facets.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/facets" --upload-file dataverse-facets.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/facets --upload-file dataverse-facets.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/facets" --upload-file dataverse-facets.json
 
 Where :download:`dataverse-facets.json <../_static/api/dataverse-facets.json>` contains a JSON encoded list of metadata keys (e.g. ``["authorName","authorAffiliation"]``).
 
+.. _metadata-block-facet-api:
+
 List Metadata Block Facets Configured for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -240,13 +242,13 @@ List Metadata Block Facets Configured for a Dataverse Collection
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablockfacets
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablockfacets"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablockfacets
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets"
 
 Set Metadata Block Facets for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -263,13 +265,13 @@ To clear the metadata blocks set by a parent collection, submit an empty array (
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/metadatablockfacets --upload-file metadata-block-facets.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" "$SERVER_URL/api/dataverses/$ID/metadatablockfacets" --upload-file metadata-block-facets.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/metadatablockfacets --upload-file metadata-block-facets.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets" --upload-file metadata-block-facets.json
 
 Where :download:`metadata-block-facets.json <../_static/api/metadata-block-facets.json>` contains a JSON encoded list of metadata block names (e.g. ``["socialscience","geospatial"]``). This endpoint supports an empty list (e.g. ``[]``)
 
@@ -288,13 +290,15 @@ When updating the root to false, it will clear any metadata block facets from th
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/metadatablockfacets/isRoot -d 'true'
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" "$SERVER_URL/api/dataverses/$ID/metadatablockfacets/isRoot" -d 'true'
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/metadatablockfacets/isRoot -d 'true'
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/metadatablockfacets/isRoot" -d 'true'
+
+.. _create-role-in-collection:
 
 Create a New Role in a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -307,24 +311,15 @@ Creates a new role under Dataverse collection ``id``. Needs a json file with the
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json
 
-Where ``roles.json`` looks like this::
-
-  {
-    "alias": "sys1",
-    "name": “Restricted System Role”,
-    "description": “A person who may only add datasets.”,
-    "permissions": [
-      "AddDataset"
-    ]
-  } 
+For ``roles.json`` see :ref:`json-representation-of-a-role`.
 
 .. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection.
 
@@ -341,13 +336,13 @@ List all the role assignments at the given Dataverse collection:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/assignments
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/assignments"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/assignments
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/assignments"
 
 Assign Default Role to User Creating a Dataset in a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -361,13 +356,13 @@ Assign a default role to a user creating a dataset in a Dataverse collection ``i
   export ID=root
   export ROLE_ALIAS=curator
 
-  curl -H X-Dataverse-key:$API_TOKEN -X PUT $SERVER_URL/api/dataverses/$ID/defaultContributorRole/$ROLE_ALIAS
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$ID/defaultContributorRole/$ROLE_ALIAS"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/defaultContributorRole/curator
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/root/defaultContributorRole/curator"
 
 Note: You may use "none" as the ``ROLE_ALIAS``. This will prevent a user who creates a dataset from having any role on that dataset. It is not recommended for Dataverse collections with human contributors.
 
@@ -384,13 +379,13 @@ Assigns a new role, based on the POSTed JSON:
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" $SERVER_URL/api/dataverses/$ID/assignments --upload-file role.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/dataverses/$ID/assignments" --upload-file role.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-Type: application/json" https://demo.dataverse.org/api/dataverses/root/assignments --upload-file role.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "https://demo.dataverse.org/api/dataverses/root/assignments" --upload-file role.json
 
 POSTed JSON example (the content of ``role.json`` file)::
 
@@ -413,13 +408,13 @@ Delete the assignment whose id is ``$id``:
   export ID=root
   export ASSIGNMENT_ID=6
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/dataverses/$ID/assignments/$ASSIGNMENT_ID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/dataverses/$ID/assignments/$ASSIGNMENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/dataverses/root/assignments/6
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/dataverses/root/assignments/6"
 
 List Metadata Blocks Defined on a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -436,13 +431,13 @@ Please note that an API token is only required if the Dataverse collection has n
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablocks
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablocks
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks"
 
 Define Metadata Blocks for a Dataverse Collection
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -461,13 +456,13 @@ The metadata blocks that are available with a default Dataverse installation are
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/metadatablocks -H \"Content-type:application/json\" --upload-file define-metadatablocks.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/metadatablocks" -H \"Content-type:application/json\" --upload-file define-metadatablocks.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json https://demo.dataverse.org/api/dataverses/root/metadatablocks
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" --upload-file define-metadatablocks.json "https://demo.dataverse.org/api/dataverses/root/metadatablocks"
 
 Determine if a Dataverse Collection Inherits Its Metadata Blocks from Its Parent
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -480,13 +475,13 @@ Get whether the Dataverse collection is a metadata block root, or does it uses i
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot"
 
 Configure a Dataverse Collection to Inherit Its Metadata Blocks from Its Parent
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -500,16 +495,66 @@ values are ``true`` and ``false`` (both are valid JSON expressions):
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X PUT $SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$ID/metadatablocks/isRoot"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot"
+
+.. note:: The previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST https://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated but still supported.
+
+.. _get-dataset-json-schema:
+
+Retrieve a Dataset JSON Schema for a Collection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Retrieves a JSON schema customized for a given collection in order to validate a dataset JSON file prior to creating the dataset. This
+first version of the schema only includes required elements and fields. In the future we plan to improve the schema by adding controlled
+vocabulary and more robust dataset field format testing:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/datasetSchema"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X PUT https://demo.dataverse.org/api/dataverses/root/metadatablocks/isRoot
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/datasetSchema"
+
+Note: you must have "Add Dataset" permission in the given collection to invoke this endpoint.
+
+While it is recommended to download a copy of the JSON Schema from the collection (as above) to account for any fields that have been marked as required, you can also download a minimal :download:`dataset-schema.json <../_static/api/dataset-schema.json>` to get a sense of the schema when no customizations have been made.
+
+.. _validate-dataset-json:
+
+Validate Dataset JSON File for a Collection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Validates a dataset JSON file customized for a given collection prior to creating the dataset. The validation only tests for JSON formatting
+and the presence of required elements:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=root
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/validateDatasetJson" -H 'Content-type:application/json' --upload-file dataset.json
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
 
-.. note:: Previous endpoints ``$SERVER/api/dataverses/$id/metadatablocks/:isRoot`` and ``POST http://$SERVER/api/dataverses/$id/metadatablocks/:isRoot?key=$apiKey`` are deprecated, but supported.
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/validateDatasetJson" -H 'Content-type:application/json' --upload-file dataset.json
 
+Note: you must have "Add Dataset" permission in the given collection to invoke this endpoint.
 
 .. _create-dataset-command: 
 
@@ -526,6 +571,66 @@ To create a dataset, you must supply a JSON file that contains at least the foll
 - Description Text
 - Subject
 
+Submit Incomplete Dataset
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+**Note:** This feature requires :ref:`dataverse.api.allow-incomplete-metadata` to be enabled and your Solr
+schema to be up to date with the ``datasetValid`` field. If this was not already done as part of a version upgrade, you will
+also need to reindex all datasets after enabling the :ref:`dataverse.api.allow-incomplete-metadata` feature.
+
+Providing the ``.../datasets?doNotValidate=true`` query parameter turns off the validation of metadata.
+In this case, only the "Author Name" field is required, unless the :ref:`:MetadataLanguages` setting
+is configured and the collection's "Dataset Metadata Language" setting is left at the default
+"Chosen at Dataset Creation" value. In that case, a language that is part of the :ref:`:MetadataLanguages` list must be
+declared in the incomplete dataset.
+
+For example, a minimal JSON file, without the language specification, would look like this:
+
+.. code-block:: json
+  :name: dataset-incomplete.json
+
+  {
+    "datasetVersion": {
+      "metadataBlocks": {
+        "citation": {
+          "fields": [
+            {
+              "value": [
+                {
+                  "authorName": {
+                    "value": "Finch, Fiona",
+                    "typeClass": "primitive",
+                    "multiple": false,
+                    "typeName": "authorName"
+                  }
+                }
+              ],
+              "typeClass": "compound",
+              "multiple": true,
+              "typeName": "author"
+            }
+          ],
+          "displayName": "Citation Metadata"
+        }
+      }
+    }
+  }
+
+The following is an example HTTP call with deactivated validation:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export PARENT=root
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets?doNotValidate=true" --upload-file dataset-incomplete.json -H 'Content-type:application/json'
+
+**Note:** You may learn about an instance's support for deposition of incomplete datasets via :ref:`info-incomplete-metadata`.
+
+Submit Dataset
+^^^^^^^^^^^^^^
+
 As a starting point, you can download :download:`dataset-finch1.json <../../../../scripts/search/tests/data/dataset-finch1.json>` and modify it to meet your needs. (:download:`dataset-finch1_fr.json <../../../../scripts/api/data/dataset-finch1_fr.json>` is a variant of this file that includes setting the metadata language (see :ref:`:MetadataLanguages`) to French (fr). In addition to this minimal example, you can download :download:`dataset-create-new-all-default-fields.json <../../../../scripts/api/data/dataset-create-new-all-default-fields.json>` which populates all of the metadata fields that ship with a Dataverse installation.)
 
 The curl command below assumes you have kept the name "dataset-finch1.json" and that this file is in your current working directory.
@@ -540,7 +645,7 @@ Next you need to figure out the alias or database id of the "parent" Dataverse c
   export PARENT=root
   export SERVER_URL=https://demo.dataverse.org
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets" --upload-file dataset-finch1.json -H 'Content-type:application/json'
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$PARENT/datasets" --upload-file dataset-finch1.json -H 'Content-type:application/json'
 
 The fully expanded example above (without the environment variables) looks like this:
 
@@ -568,13 +673,13 @@ To import a dataset with an existing persistent identifier (PID), the dataset's
   export DATAVERSE_ID=root
   export PERSISTENT_IDENTIFIER=doi:ZZ7/MOSEISLEYDB94
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:import?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file dataset.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:import?pid=$PERSISTENT_IDENTIFIER&release=yes" --upload-file dataset.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-    curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets/:import?pid=doi:ZZ7/MOSEISLEYDB94&release=yes --upload-file dataset.json
+    curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/datasets/:import?pid=doi:ZZ7/MOSEISLEYDB94&release=yes" --upload-file dataset.json
 
 The ``pid`` parameter holds a persistent identifier (such as a DOI or Handle). The import will fail if no PID is provided, or if the provided PID fails validation.
 
@@ -609,13 +714,13 @@ To import a dataset with an existing persistent identifier (PID), you have to pr
   export DATAVERSE_ID=root
   export PERSISTENT_IDENTIFIER=doi:ZZ7/MOSEISLEYDB94
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:importddi?pid=$PERSISTENT_IDENTIFIER&release=yes --upload-file ddi_dataset.xml
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$DATAVERSE_ID/datasets/:importddi?pid=$PERSISTENT_IDENTIFIER&release=yes" --upload-file ddi_dataset.xml
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/datasets/:importddi?pid=doi:ZZ7/MOSEISLEYDB94&release=yes --upload-file ddi_dataset.xml
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/datasets/:importddi?pid=doi:ZZ7/MOSEISLEYDB94&release=yes" --upload-file ddi_dataset.xml
 
 The optional ``pid`` parameter holds a persistent identifier (such as a DOI or Handle). The import will fail if the provided PID fails validation.
 
@@ -645,13 +750,13 @@ In order to publish a Dataverse collection, you must know either its "alias" (wh
   export SERVER_URL=https://demo.dataverse.org
   export ID=root
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/dataverses/$ID/actions/:publish
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/actions/:publish"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/dataverses/root/actions/:publish  
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/dataverses/root/actions/:publish"
 
 You should expect a 200 ("OK") response and JSON output.
 
@@ -674,13 +779,66 @@ In order to retrieve the Guestbook Responses for a Dataverse collection, you mus
   export GUESTBOOK_ID=1
   export FILENAME=myResponses.csv 
 
-  curl -H  X-Dataverse-key:$API_TOKEN $SERVER_URL/api/dataverses/$ID/guestbookResponses?guestbookId=$GUESTBOOK_ID -o $FILENAME
+  curl -H  "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/guestbookResponses?guestbookId=$GUESTBOOK_ID" -o $FILENAME
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/dataverses/root/guestbookResponses?guestbookId=1 -o myResponses.csv
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/guestbookResponses?guestbookId=1" -o myResponses.csv
+
+.. _collection-attributes-api:
+  
+Change Collection Attributes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  curl -X PUT -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/attribute/$ATTRIBUTE?value=$VALUE"
+
+The following attributes are supported:
+
+* ``alias``  Collection alias
+* ``name`` Name
+* ``description`` Description
+* ``affiliation`` Affiliation
+* ``filePIDsEnabled`` ("true" or "false") Restricted to use by superusers and only when the :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>` setting is true. Enables or disables registration of file-level PIDs in datasets within the collection (overriding the instance-wide setting).
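+
+For example, to change the name of the root collection (the attribute value shown is purely illustrative; values containing spaces must be URL-encoded):
+
+.. code-block:: bash
+
+  curl -X PUT -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/attribute/name?value=Root%20Collection"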
+
+.. _collection-storage-quotas:
+  
+Collection Storage Quotas
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storage/quota"
+
+Will output the storage quota allocated (in bytes), or a message indicating that the quota is not defined for the specific collection. The user identified by the API token must have the ``Manage`` permission on the collection. 
+
+
+To set or change the storage allocation quota for a collection:
+
+.. code-block:: bash
+
+  curl -X PUT -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storage/quota/$SIZE_IN_BYTES"
+
+This API is superuser-only.
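+
+For example, to set a quota of 1 GB on the root collection (the quota is given in bytes; the size shown is purely illustrative):
+
+.. code-block:: bash
+
+  curl -X PUT -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/dataverses/root/storage/quota/1000000000"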
+  
+
+To delete a storage quota configured for a collection:
+
+.. code-block:: bash
+
+  curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/$ID/storage/quota"
+
+This API is superuser-only.
+
+Use the ``/settings`` API to enable or disable the enforcement of storage quotas across the instance via the following setting. For example:
+
+.. code-block:: bash
+
+   curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:UseStorageQuotas
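+
+To turn the enforcement back off, delete the setting via the same admin settings API:
+
+.. code-block:: bash
+
+   curl -X DELETE http://localhost:8080/api/admin/settings/:UseStorageQuotas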
+
 
 Datasets
 --------
@@ -707,13 +865,13 @@ Example: Getting the dataset whose DOI is *10.5072/FK2/J8SJZB*:
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:$API_TOKEN" https://demo.dataverse.org/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key:$API_TOKEN" "https://demo.dataverse.org/api/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB"
 
 Getting its draft version:
 
@@ -722,13 +880,13 @@ Getting its draft version:
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
 
-  curl -H "X-Dataverse-key:$API_TOKEN" http://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER
+  curl -H "X-Dataverse-key:$API_TOKEN" "https://$SERVER/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:$API_TOKEN" https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key:$API_TOKEN" "https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/J8SJZB"
 
 |CORS| Show the dataset whose database id is passed:
 
@@ -737,13 +895,13 @@ The fully expanded example above (without environment variables) looks like this
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl $SERVER_URL/api/datasets/$ID
+  curl "$SERVER_URL/api/datasets/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/24
+  curl "https://demo.dataverse.org/api/datasets/24"
 
 The dataset id can be extracted from the response retrieved from the API which uses the persistent identifier (``/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER``).
 
@@ -757,13 +915,13 @@ List Versions of a Dataset
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl $SERVER_URL/api/datasets/$ID/versions
+  curl "$SERVER_URL/api/datasets/$ID/versions"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/24/versions
+  curl "https://demo.dataverse.org/api/datasets/24/versions"
 
 It returns a list of versions with their metadata, and file list:
 
@@ -816,6 +974,10 @@ It returns a list of versions with their metadata, and file list:
     ]
   }
 
+The optional ``includeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files, including the files can dramatically increase the volume of the output.) A separate ``/files`` API can be used to list the files, or a subset thereof, in a given version.
+
+The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list for a dataset with a large number of versions.
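+
+For example, to page through the version list without including the files:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions?includeFiles=false&limit=10&offset=0"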
+
 
 Get Version of a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~
@@ -828,13 +990,26 @@ Get Version of a Dataset
   export ID=24
   export VERSION=1.0
 
-  curl $SERVER_URL/api/datasets/$ID/versions/$VERSION
+  curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION?includeFiles=false"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/24/versions/1.0
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeFiles=false"
+
+The optional ``includeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. 
+
+
+By default, deaccessioned dataset versions are not included in the search when applying the ``:latest`` or ``:latest-published`` identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
+
+If you want to include deaccessioned dataset versions, you must set the ``includeDeaccessioned`` query parameter to ``true``.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeDeaccessioned=true"
 
 .. _export-dataset-metadata-api:
 
@@ -851,13 +1026,13 @@ See also :ref:`batch-exports-through-the-api` and the note below:
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
   export METADATA_FORMAT=ddi
 
-  curl $SERVER_URL/api/datasets/export?exporter=$METADATA_FORMAT&persistentId=PERSISTENT_IDENTIFIER
+  curl "$SERVER_URL/api/datasets/export?exporter=$METADATA_FORMAT&persistentId=PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB
+  curl "https://demo.dataverse.org/api/datasets/export?exporter=ddi&persistentId=doi:10.5072/FK2/J8SJZB"
 
 .. note:: Supported exporters (export formats) are ``ddi``, ``oai_ddi``, ``dcterms``, ``oai_dc``, ``schema.org`` , ``OAI_ORE`` , ``Datacite``, ``oai_datacite`` and ``dataverse_json``. Descriptive names can be found under :ref:`metadata-export-formats` in the User Guide.
 
@@ -883,13 +1058,183 @@ List Files in a Dataset
   export ID=24
   export VERSION=1.0
 
-  curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/files
+  curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
  
-  curl https://demo.dataverse.org/api/datasets/24/versions/1.0/files
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files"
+
+This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?limit=10&offset=20"
+
+Category name filtering is also optionally supported, to return only the files to which the requested category has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?categoryName=Data"
+
+Tabular tag name filtering is also optionally supported, to return only the files to which the requested tabular tag has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?tabularTagName=Survey"
+
+Content type filtering is also optionally supported, to return only the files matching the requested content type.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?contentType=image/png"
+
+Filtering by search text is also optionally supported. The search is applied to the labels and descriptions of the dataset files, returning the files that contain the search text in either of those fields.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?searchText=word"
+
+File access filtering is also optionally supported, using the following possible values:
+
+* ``Public``
+* ``Restricted``
+* ``EmbargoedThenRestricted``
+* ``EmbargoedThenPublic``
+
+If no filter is specified, the files will match all of the above categories.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?accessStatus=Public"
+
+Ordering criteria for sorting the results are also optionally supported, using the following possible values:
+
+* ``NameAZ`` (Default)
+* ``NameZA``
+* ``Newest``
+* ``Oldest``
+* ``Size``
+* ``Type``
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?orderCriteria=Newest"
+
+Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them.
+
+By default, deaccessioned dataset versions are not included in the search when applying the ``:latest`` or ``:latest-published`` identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
+
+If you want to include deaccessioned dataset versions, you must set the ``includeDeaccessioned`` query parameter to ``true``.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?includeDeaccessioned=true"
+
+.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.
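+
+For example, combining pagination with a category filter and an ordering criterion:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?limit=10&offset=0&categoryName=Data&orderCriteria=Newest"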
+
+Get File Counts in a Dataset
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Get file counts for the given dataset and version.
+
+The returned file counts are based on different criteria:
+
+- Total (The total file count)
+- Per content type
+- Per category name
+- Per tabular tag name
+- Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic)
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+  export VERSION=1.0
+
+  curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files/counts"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts"
+
+Category name filtering is optionally supported, to return counts only for files to which the requested category has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?categoryName=Data"
+
+Tabular tag name filtering is also optionally supported, to return counts only for files to which the requested tabular tag has been added.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?tabularTagName=Survey"
+
+Content type filtering is also optionally supported, to return counts only for files matching the requested content type.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=image/png"
+
+Filtering by search text is also optionally supported. The search is applied to the labels and descriptions of the dataset files, returning counts only for files that contain the search text in either of those fields.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?searchText=word"
+
+File access filtering is also optionally supported, using the following possible values:
+
+* ``Public``
+* ``Restricted``
+* ``EmbargoedThenRestricted``
+* ``EmbargoedThenPublic``
+
+If no filter is specified, the files will match all of the above categories.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?accessStatus=Public"
+
+By default, deaccessioned dataset versions are not supported by this endpoint and will be ignored in the search when applying the ``:latest`` or ``:latest-published`` identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the option described below.
+
+If you want to include deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?includeDeaccessioned=true"
+
+Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them.
+
+Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.
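+
+For example, combining a content type filter with the deaccessioned option:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=image/png&includeDeaccessioned=true"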
 
 View Dataset Files and Folders as a Directory Index
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -898,9 +1243,9 @@ View Dataset Files and Folders as a Directory Index
 
 .. code-block:: bash
 
-  curl $SERVER_URL/api/datasets/${ID}/dirindex/
+  curl "$SERVER_URL/api/datasets/${ID}/dirindex/"
   # or
-  curl ${SERVER_URL}/api/datasets/:persistentId/dirindex?persistentId=doi:${PERSISTENT_ID}
+  curl "${SERVER_URL}/api/datasets/:persistentId/dirindex?persistentId=doi:${PERSISTENT_ID}"
 
 
 Optional parameters:
@@ -998,13 +1343,13 @@ List All Metadata Blocks for a Dataset
   export ID=24
   export VERSION=1.0
 
-  curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata
+  curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata"
 
 List Single Metadata Block for a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1018,13 +1363,13 @@ List Single Metadata Block for a Dataset
   export VERSION=1.0
   export METADATA_BLOCK=citation
 
-  curl $SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata/$METADATA_BLOCK
+  curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/metadata/$METADATA_BLOCK"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata/citation
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/metadata/citation"
 
 Update Metadata For a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1041,15 +1386,15 @@ For example, after making your edits, your JSON file might look like :download:`
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-update-metadata.json
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/versions/:draft?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-update-metadata.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-update-metadata.json
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/versions/:draft?persistentId=doi:10.5072/FK2/BCCP9Z" --upload-file dataset-update-metadata.json
 
-Note that in the example JSON file above, there is a single JSON object with ``metadataBlocks`` as a key. When you download a representation of your dataset in JSON format, the ``metadataBlocks`` object you need is nested inside another object called ``datasetVersion``. To extract just the ``metadataBlocks`` key when downloading a JSON representation, you can use a tool such as ``jq`` like this:
+Note that in the example JSON file above, there are only two objects, under the ``license`` and ``metadataBlocks`` keys. When you download a representation of your latest dataset version in JSON format, these objects will be nested inside another object called ``data`` in the API response. Note that there may be more objects in there, in addition to ``license`` and ``metadataBlocks``, that you may need to preserve and re-import as well. Basically, you need everything in there except for the ``files``. This can be achieved by downloading the metadata and selecting the sections you need with a JSON tool such as ``jq``, like this:
 
 .. code-block:: bash
 
@@ -1057,15 +1402,18 @@ Note that in the example JSON file above, there is a single JSON object with ``m
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
 
-  curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PERSISTENT_IDENTIFIER | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json
-
+  curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/versions/:latest?persistentId=$PERSISTENT_IDENTIFIER" | jq '.data | del(.files)' > dataset-update-metadata.json
+  
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/datasets/:persistentId/versions/:latest?persistentId=doi:10.5072/FK2/BCCP9Z | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/:persistentId/versions/:latest?persistentId=doi:10.5072/FK2/BCCP9Z" | jq '.data | {metadataBlocks: .metadataBlocks}' > dataset-update-metadata.json
 
-Now that the resulting JSON file only contains the ``metadataBlocks`` key, you can edit the JSON such as with ``vi`` in the example below::
+
+Now you can edit the JSON produced by the command above with a text editor of your choice, for example with ``vi`` as shown below.
+
+Note that you don't need to edit the top-level fields such as ``versionNumber``, ``minorVersionNumber``, ``versionState`` or any of the timestamps - these will be updated automatically by the API as needed::
 
     vi dataset-update-metadata.json
 
@@ -1084,13 +1432,13 @@ Alternatively to replacing an entire dataset version with its JSON representatio
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata/?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-add-metadata.json
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata/?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-add-metadata.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-add-metadata.json
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z" --upload-file dataset-add-metadata.json
 
 You may also replace existing metadata in dataset fields with the following (adding the parameter replace=true):
 
@@ -1100,13 +1448,13 @@ You may also replace existing metadata in dataset fields with the following (add
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true --upload-file dataset-update-metadata.json
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/editMetadata?persistentId=$PERSISTENT_IDENTIFIER&replace=true" --upload-file dataset-update-metadata.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true --upload-file dataset-update-metadata.json
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/editMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z&replace=true" --upload-file dataset-update-metadata.json
 
 For these edits your JSON file need only include those dataset fields which you would like to edit. A sample JSON file may be downloaded here: :download:`dataset-edit-metadata-sample.json <../_static/api/dataset-edit-metadata-sample.json>` 
 
@@ -1121,13 +1469,13 @@ You may delete some of the metadata of a dataset version by supplying a file wit
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/BCCP9Z
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/deleteMetadata/?persistentId=$PERSISTENT_IDENTIFIER --upload-file dataset-delete-author-metadata.json
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/deleteMetadata/?persistentId=$PERSISTENT_IDENTIFIER" --upload-file dataset-delete-author-metadata.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/deleteMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z --upload-file dataset-delete-author-metadata.json
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/deleteMetadata/?persistentId=doi:10.5072/FK2/BCCP9Z" --upload-file dataset-delete-author-metadata.json
 
 For these deletes your JSON file must include an exact match of those dataset fields which you would like to delete. A sample JSON file may be downloaded here: :download:`dataset-delete-author-metadata.json <../_static/api/dataset-delete-author-metadata.json>` 
 
@@ -1176,13 +1524,46 @@ Deletes the draft version of dataset ``$ID``. Only the draft version can be dele
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/versions/:draft
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/versions/:draft"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/versions/:draft"
+
+Deaccession Dataset
+~~~~~~~~~~~~~~~~~~~
+
+Given a version of a dataset, updates its status to deaccessioned.
+
+The JSON body required to deaccession a dataset (``deaccession.json``) looks like this::
+
+  {
+    "deaccessionReason": "Description of the deaccession reason.",
+    "deaccessionForwardURL": "https://demo.dataverse.org"
+  }
+
+
+Note that the field ``deaccessionForwardURL`` is optional.
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+  export VERSIONID=1.0
+  export FILE_PATH=deaccession.json
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/versions/:draft
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -H "Content-type:application/json" --upload-file deaccession.json
+
+.. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, the second call will return a "not found" error, because the version is no longer published once it has been deaccessioned.
 
 Set Citation Date Field Type for a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1197,13 +1578,13 @@ Note that the dataset citation date field type must be a date field.
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
   export DATASET_FIELD_TYPE_NAME=dateOfDeposit
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER --data "$DATASET_FIELD_TYPE_NAME"
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER" --data "$DATASET_FIELD_TYPE_NAME"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB --data "dateOfDeposit"
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB" --data "dateOfDeposit"
 
 Revert Citation Date Field Type to Default for Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1216,13 +1597,13 @@ Restores the default citation date field type, ``:publicationDate``, for a given
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/citationdate?persistentId=$PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/:persistentId/citationdate?persistentId=doi:10.5072/FK2/J8SJZB"
 
 .. _list-roles-on-a-dataset-api:
 
@@ -1237,13 +1618,13 @@ Lists all role assignments on a given dataset:
   export SERVER_URL=https://demo.dataverse.org
   export ID=2347
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/assignments
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/assignments"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx  https://demo.dataverse.org/api/datasets/2347/assignments 
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"  "https://demo.dataverse.org/api/datasets/2347/assignments"
   
 .. _assign-role-on-a-dataset-api:
 
@@ -1258,13 +1639,13 @@ Assigns a new role, based on the POSTed JSON:
   export SERVER_URL=https://demo.dataverse.org
   export ID=2347
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" $SERVER_URL/api/datasets/$ID/assignments --upload-file role.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/datasets/$ID/assignments" --upload-file role.json
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-Type: application/json" https://demo.dataverse.org/api/datasets/2347/assignments --upload-file role.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-Type: application/json" "https://demo.dataverse.org/api/datasets/2347/assignments" --upload-file role.json
 
 POSTed JSON example (the content of ``role.json`` file)::
 
@@ -1287,13 +1668,13 @@ Delete the assignment whose id is ``$id``:
   export ID=2347
   export ASSIGNMENT_ID=6
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/datasets/$ID/assignments/$ASSIGNMENT_ID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/assignments/$ASSIGNMENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/datasets/2347/assignments/6
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/2347/assignments/6"
 
 
 Create a Private URL for a Dataset
@@ -1307,20 +1688,20 @@ Create a Private URL (must be able to manage dataset permissions):
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/datasets/$ID/privateUrl
+  curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/privateUrl"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/privateUrl
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/privateUrl"
   
 If Anonymized Access has been enabled on a Dataverse installation (see the :ref:`:AnonymizedFieldTypeNames` setting), an optional 'anonymizedAccess' query parameter is allowed.
 Setting anonymizedAccess=true in your call will create a PrivateURL that only allows an anonymized view of the Dataset (see :ref:`privateurl`).
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/privateUrl?anonymizedAccess=true
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/privateUrl?anonymizedAccess=true"
 
 
 Get the Private URL for a Dataset
@@ -1334,13 +1715,13 @@ Get a Private URL from a dataset (if available):
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" $SERVER_URL/api/datasets/$ID/privateUrl
+  curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/$ID/privateUrl"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/datasets/24/privateUrl
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/privateUrl"
 
 Delete the Private URL from a Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1353,13 +1734,13 @@ Delete a Private URL from a dataset (if it exists):
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/privateUrl
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/privateUrl"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/privateUrl
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/privateUrl"
 
 .. _add-file-api: 
 
@@ -1386,13 +1767,13 @@ In the curl example below, all of the above are specified but they are optional.
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB"
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F file=@data.tsv -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "tabIngest":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB"
 
 You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly uploaded file.
 
@@ -1510,7 +1891,7 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB -F 'jsonData={"description":"A remote image.","storageIdentifier":"trsa://themes/custom/qdr/images/CoreTrustSeal-logo-transparent.png","checksumType":"MD5","md5Hash":"509ef88afa907eaf2c17c1c8d8fde77e","label":"testlogo.png","fileName":"testlogo.png","mimeType":"image/png"}'
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB" -F 'jsonData={"description":"A remote image.","storageIdentifier":"trsa://themes/custom/qdr/images/CoreTrustSeal-logo-transparent.png","checksumType":"MD5","md5Hash":"509ef88afa907eaf2c17c1c8d8fde77e","label":"testlogo.png","fileName":"testlogo.png","mimeType":"image/png"}'
 
 .. _cleanup-storage-api:
 
@@ -1542,7 +1923,7 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X GET https://demo.dataverse.org/api/datasets/:persistentId/cleanStorage?persistentId=doi:10.5072/FK2/J8SJZB&dryrun=true
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/datasets/:persistentId/cleanStorage?persistentId=doi:10.5072/FK2/J8SJZB&dryrun=true"
 
 Adding Files To a Dataset via Other Tools
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1562,13 +1943,13 @@ Shows the combined size in bytes of all the files uploaded into the dataset ``id
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/storagesize
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/storagesize"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/24/storagesize
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/storagesize"
 
 The size of published and unpublished files will be summed in the dataset specified. 
 By default, only the archival files are counted - i.e., the files uploaded by users (plus the tab-delimited versions generated for tabular data files on ingest). If the optional argument ``includeCached=true`` is specified, the API will also add the sizes of all the extra files generated and cached by the Dataverse installation - the resized thumbnail versions for image files, the metadata exports for published datasets, etc. Because this deals with unpublished files the token supplied must have permission to view unpublished drafts. 
@@ -1586,67 +1967,138 @@ Shows the combined size in bytes of all the files available for download from ve
   export ID=24
   export VERSIONID=1.0
 
-  curl -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/datasets/$ID/versions/$VERSIONID/downloadsize
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/downloadsize"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize"
 
 The size of all files available for download will be returned. 
 If ``:draft`` is passed as the versionId, the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. Restricted files are included in this total regardless of whether the user has access to download them.
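+
+For example, to check the size of the draft version of a hypothetical dataset with database id 24 (a sketch following the same endpoint pattern; the token must allow viewing drafts):
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/:draft/downloadsize"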
 
-Submit a Dataset for Review
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+There is an optional query parameter ``mode`` which applies a filter criterion to the operation. This parameter supports the following values:
 
-When dataset authors do not have permission to publish directly, they can click the "Submit for Review" button in the web interface (see :doc:`/user/dataset-management`), or perform the equivalent operation via API:
+* ``All`` (Default): Includes both archival and original sizes for tabular files
+* ``Archival``: Includes only the archival size for tabular files
+* ``Original``: Includes only the original size for tabular files
+
+Usage example:
 
 .. code-block:: bash
 
-  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-  export SERVER_URL=https://demo.dataverse.org
-  export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?mode=Archival"
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/submitForReview?persistentId=$PERSISTENT_ID"
+Category name filtering is also optionally supported, returning the size of all files available for download that match the requested category name.
 
-The fully expanded example above (without environment variables) looks like this:
+Usage example:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/submitForReview?persistentId=doi:10.5072/FK2/J8SJZB"
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?categoryName=Data"
 
-The people who need to review the dataset (often curators or journal editors) can check their notifications periodically via API to see if any new datasets have been submitted for review and need their attention. See the :ref:`Notifications` section for details. Alternatively, these curators can simply check their email or notifications to know when datasets have been submitted (or resubmitted) for review.
+Tabular tag name filtering is also optionally supported, returning the size of all files available for download to which the requested tabular tag has been added.
 
-Return a Dataset to Author
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+Usage example:
 
-After the curators or journal editors have reviewed a dataset that has been submitted for review (see "Submit for Review", above) they can either choose to publish the dataset (see the ``:publish`` "action" above) or return the dataset to its authors. In the web interface there is a "Return to Author" button (see :doc:`/user/dataset-management`), but the interface does not provide a way to explain **why** the dataset is being returned. There is a way to do this outside of this interface, however. Instead of clicking the "Return to Author" button in the UI, a curator can write a "reason for return" into the database via API.
+.. code-block:: bash
 
-Here's how curators can send a "reason for return" to the dataset authors. First, the curator creates a JSON file that contains the reason for return:
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?tabularTagName=Survey"
 
-.. literalinclude:: ../_static/api/reason-for-return.json
+Content type filtering is also optionally supported, returning the size of all files available for download that match the requested content type.
 
-In the example below, the curator has saved the JSON file as :download:`reason-for-return.json <../_static/api/reason-for-return.json>` in their current working directory. Then, the curator sends this JSON file to the ``returnToAuthor`` API endpoint like this:
+Usage example:
 
 .. code-block:: bash
 
-  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-  export SERVER_URL=https://demo.dataverse.org
-  export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?contentType=image/png"
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/returnToAuthor?persistentId=$PERSISTENT_ID" -H "Content-type: application/json" -d @reason-for-return.json
+Filtering by search text is also optionally supported. The search is applied to the labels and descriptions of the dataset files, returning the size of all files available for download that contain the searched text in either of those fields.
 
-The fully expanded example above (without environment variables) looks like this:
+Usage example:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/returnToAuthor?persistentId=doi:10.5072/FK2/J8SJZB" -H "Content-type: application/json" -d @reason-for-return.json
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?searchText=word"
 
-The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is persisted into the database, stored at the dataset version level.
+File access filtering is also optionally supported, using the following possible values:
 
-Link a Dataset
-~~~~~~~~~~~~~~
+* ``Public``
+* ``Restricted``
+* ``EmbargoedThenRestricted``
+* ``EmbargoedThenPublic``
+
+If no filter is specified, the files will match all of the above categories.
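+
+Usage example (a sketch; the ``accessStatus`` query parameter name is an assumption and is not stated in the text above):
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?accessStatus=Public"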
+
+Please note that filtering query parameters are case sensitive and must be correctly typed for the endpoint to recognize them.
+
+By default, deaccessioned dataset versions are not included in the search when applying the ``:latest`` or ``:latest-published`` identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
+
+If you want to include deaccessioned dataset versions, you must set the ``includeDeaccessioned`` query parameter to ``true``.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?includeDeaccessioned=true"
+
+.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.
+
+Submit a Dataset for Review
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When dataset authors do not have permission to publish directly, they can click the "Submit for Review" button in the web interface (see :doc:`/user/dataset-management`), or perform the equivalent operation via API:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
+
+  curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/submitForReview?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/submitForReview?persistentId=doi:10.5072/FK2/J8SJZB"
+
+The people who need to review the dataset (often curators or journal editors) can check their notifications periodically via API to see if any new datasets have been submitted for review and need their attention. See the :ref:`Notifications` section for details. Alternatively, these curators can simply check their email or notifications to know when datasets have been submitted (or resubmitted) for review.
+
+.. _return-a-dataset:
+
+Return a Dataset to Author
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After the curators or journal editors have reviewed a dataset that has been submitted for review (see "Submit for Review", above) they can either choose to publish the dataset (see the ``:publish`` "action" above) or return the dataset to its authors. In the web interface there is a "Return to Author" button (see :doc:`/user/dataset-management`), but the interface does not provide a way to explain **why** the dataset is being returned. There is a way to do this outside of this interface, however. Instead of clicking the "Return to Author" button in the UI, a curator can write a "reason for return" into the database via API.
+
+Here's how curators can send a "reason for return" to the dataset authors. First, the curator creates a JSON file that contains the reason for return:
+
+.. literalinclude:: ../_static/api/reason-for-return.json
+
+In the example below, the curator has saved the JSON file as :download:`reason-for-return.json <../_static/api/reason-for-return.json>` in their current working directory. Then, the curator sends this JSON file to the ``returnToAuthor`` API endpoint like this:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
+
+  curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/:persistentId/returnToAuthor?persistentId=$PERSISTENT_ID" -H "Content-type: application/json" -d @reason-for-return.json
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/:persistentId/returnToAuthor?persistentId=doi:10.5072/FK2/J8SJZB" -H "Content-type: application/json" -d @reason-for-return.json
+
+The review process can sometimes resemble a tennis match, with the authors submitting and resubmitting the dataset over and over until the curators are satisfied. Each time the curators send a "reason for return" via API, that reason is persisted into the database, stored at the dataset version level.
+
+The :ref:`send-feedback` API call may be useful as a way to move the conversation to email. However, note that these emails go to contacts (versus authors) and there is no database record of the email contents. (:ref:`dataverse.mail.cc-support-on-contact-email` will send a copy of these emails to the support email address which would provide a record.)
+
+Link a Dataset
+~~~~~~~~~~~~~~
 
 Creates a link between a dataset and a Dataverse collection (see :ref:`dataset-linking` section of Dataverse Collection Management in the User Guide for more information):
 
@@ -1657,13 +2109,13 @@ Creates a link between a dataset and a Dataverse collection (see :ref:`dataset-l
   export DATASET_ID=24
   export DATAVERSE_ID=test
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT $SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ID
+  curl -H "X-Dataverse-key: $API_TOKEN" -X PUT "$SERVER_URL/api/datasets/$DATASET_ID/link/$DATAVERSE_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/datasets/24/link/test
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/datasets/24/link/test"
 
 Dataset Locks
 ~~~~~~~~~~~~~
@@ -1678,13 +2130,13 @@ To check if a dataset is locked:
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl $SERVER_URL/api/datasets/$ID/locks
+  curl "$SERVER_URL/api/datasets/$ID/locks"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/datasets/24/locks
+  curl "https://demo.dataverse.org/api/datasets/24/locks"
 
 Optionally, you can check if there's a lock of a specific type on the dataset:
 
@@ -1734,13 +2186,13 @@ The following API end point will lock a Dataset with a lock of specified type. N
   export ID=24
   export LOCK_TYPE=Ingest
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X POST $SERVER_URL/api/datasets/$ID/lock/$LOCK_TYPE
+  curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/lock/$LOCK_TYPE"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/datasets/24/lock/Ingest
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/lock/Ingest"
 
 Use the following API to unlock the dataset, by deleting all the locks currently on the dataset. Note that this requires “superuser” credentials:
 
@@ -1750,13 +2202,13 @@ Use the following API to unlock the dataset, by deleting all the locks currently
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/locks
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/locks"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/locks"
 
 Or, to delete a lock of the type specified only. Note that this requires “superuser” credentials:
 
@@ -1767,13 +2219,13 @@ Or, to delete a lock of the type specified only. Note that this requires “supe
   export ID=24
   export LOCK_TYPE=finalizePublication
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/locks?type=$LOCK_TYPE
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/locks?type=$LOCK_TYPE"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/locks?type=finalizePublication
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/locks?type=finalizePublication"
 
 If the dataset is not locked (or if there is no lock of the specified type), the API will exit with a warning message.
 
@@ -1922,13 +2374,13 @@ Delete the dataset whose id is passed:
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24"
 
 Delete Published Dataset
 ~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1941,13 +2393,13 @@ Normally published datasets should not be deleted, but there exists a "destroy"
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/:persistentId/destroy/?persistentId=$PERSISTENT_ID
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/:persistentId/destroy/?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/:persistentId/destroy/?persistentId=doi:10.5072/FK2/AAA000
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/:persistentId/destroy/?persistentId=doi:10.5072/FK2/AAA000"
 
 Delete with dataset identifier:
 
@@ -1957,13 +2409,13 @@ Delete with dataset identifier:
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE $SERVER_URL/api/datasets/$ID/destroy
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "$SERVER_URL/api/datasets/$ID/destroy"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/datasets/24/destroy
+  curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/destroy"
   
 Calling the destroy endpoint is permanent and irreversible. It will remove the dataset and its datafiles, then re-index the parent Dataverse collection in Solr. This endpoint requires the API token of a superuser.
 
@@ -2014,10 +2466,12 @@ The API call requires a Json body that includes the list of the fileIds that the
   curl -H "X-Dataverse-key: $API_TOKEN" -H "Content-Type:application/json" "$SERVER_URL/api/datasets/:persistentId/files/actions/:unset-embargo?persistentId=$PERSISTENT_IDENTIFIER" -d "$JSON"
   
   
+.. _Archival Status API:
+
 Get the Archival Status of a Dataset By Version
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Archiving is an optional feature that may be configured for a Dataverse installation. When that is enabled, this API call be used to retrieve the status. Note that this requires "superuser" credentials.
+Archival :ref:`BagIt Export` is an optional feature that may be configured for a Dataverse installation. When it is enabled, this API call can be used to retrieve the status. Note that this requires "superuser" credentials.
 
 ``GET /api/datasets/$dataset-id/$version/archivalStatus`` returns the archival status of the specified dataset version.
 
@@ -2084,10 +2538,155 @@ The response is a JSON object described in the :doc:`/api/external-tools` sectio
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/7U7YBV
   export VERSION=1.0
   export TOOL_ID=1
-  
 
   curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/toolparams/$TOOL_ID?persistentId=$PERSISTENT_IDENTIFIER"
 
+.. _signposting-api:
+
+Retrieve Signposting Information
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Dataverse supports :ref:`discovery-sign-posting` as a discovery mechanism.
+Signposting involves adding a `Link <https://tools.ietf.org/html/rfc5988>`__ HTTP header that provides summary information on GET and HEAD requests to the dataset page, plus a separate /linkset API call for retrieving additional information.
+
+Here is an example of a "Link" header:
+
+``Link: <https://doi.org/10.5072/FK2/YD5QDG>;rel="cite-as", <https://doi.org/10.5072/FK2/YD5QDG>;rel="describedby";type="application/vnd.citationstyles.csl+json",<https://demo.dataverse.org/api/datasets/export?exporter=schema.org&persistentId=doi:10.5072/FK2/YD5QDG>;rel="describedby";type="application/ld+json", <https://schema.org/AboutPage>;rel="type",<https://schema.org/Dataset>;rel="type", <https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/customlicense?persistentId=doi:10.5072/FK2/YD5QDG>;rel="license", <https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/linkset?persistentId=doi:10.5072/FK2/YD5QDG> ; rel="linkset";type="application/linkset+json"``
+
+The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json"`` entry in the "Link" header, such as in the example above.
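+
+For example, you can inspect this header by issuing a HEAD request against the dataset page (a sketch using the standard dataset page URL):
+
+.. code-block:: bash
+
+  curl -I "https://demo.dataverse.org/dataset.xhtml?persistentId=doi:10.5072/FK2/YD5QDG"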
+
+The response includes a JSON object conforming to the `Signposting <https://signposting.org>`__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object.
+Signposting is not supported for draft dataset versions.
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+  export VERSION=1.0
+
+  curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/linkset?persistentId=$PERSISTENT_IDENTIFIER"
+
+Get Dataset By Private URL Token
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export PRIVATE_URL_TOKEN=a56444bc-7697-4711-8964-e0577f055fd2
+
+  curl "$SERVER_URL/api/datasets/privateUrlDatasetVersion/$PRIVATE_URL_TOKEN"
+
+.. _get-citation:
+
+Get Citation
+~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+  export VERSION=1.0
+
+  curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER"
+
+By default, deaccessioned dataset versions are not included in the search when applying the ``:latest`` or ``:latest-published`` identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below.
+
+If you want to include deaccessioned dataset versions, you must set the ``includeDeaccessioned`` query parameter to ``true``.
+
+Usage example:
+
+.. code-block:: bash
+
+  curl -H "Accept:application/json" "$SERVER_URL/api/datasets/:persistentId/versions/$VERSION/{version}/citation?persistentId=$PERSISTENT_IDENTIFIER&includeDeaccessioned=true"
+
+Get Citation by Private URL Token
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export PRIVATE_URL_TOKEN=a56444bc-7697-4711-8964-e0577f055fd2
+
+  curl "$SERVER_URL/api/datasets/privateUrlDatasetVersion/$PRIVATE_URL_TOKEN/citation"
+
+.. _get-dataset-summary-field-names:
+
+Get Summary Field Names
+~~~~~~~~~~~~~~~~~~~~~~~
+
+See :ref:`:CustomDatasetSummaryFields` in the Installation Guide for how the list of dataset fields that summarize a dataset can be customized. Here's how to list them:
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl "$SERVER_URL/api/datasets/summaryFieldNames"
+
+.. _guestbook-at-request-api:
+  
+Configure When a Dataset Guestbook Appears (If Enabled)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default, users are asked to fill out a configured Guestbook when they download files from a dataset. If enabled for a given Dataverse instance (see XYZ), users may instead be asked to fill out a Guestbook only when they request access to restricted files.
+This is configured by a global default, collection-level settings, or directly at the dataset level via these API calls (superuser access is required to make changes).
+
+To see the current choice for this dataset:
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+
+  curl "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER"
+
+The response will be true (guestbook displays when making a request), false (guestbook displays at download), or will indicate that the dataset inherits one of these settings.
+
+To set the behavior for this dataset:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+
+  curl -X PUT -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json -d true "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER"
+
+This example uses true to set the behavior to guestbook at request. Note that this call will return a 403/Forbidden response if guestbook at request functionality is not enabled for this Dataverse instance.
+
+The API can also be used to reset the dataset to use the default/inherited value:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/YD5QDG
+
+  curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER"
+
+Get User Permissions on a Dataset
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This API call returns the permissions that the calling user has on a particular dataset.
+
+The user permissions that this API call checks, each returned as a boolean, are the following:
+
+* Can view the unpublished dataset
+* Can edit the dataset
+* Can publish the dataset
+* Can manage the dataset permissions
+* Can delete the dataset draft
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/userPermissions"
+
+
 Files
 -----
 
@@ -2104,13 +2703,13 @@ Example: Getting the file whose DOI is *10.5072/FK2/J8SJZB*:
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB"
 
 You may get its draft version of an unpublished file if you pass an api token with view draft permissions:
 
@@ -2120,13 +2719,13 @@ You may get its draft version of an unpublished file if you pass an api token wi
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER/api/files/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB"
 
 
 |CORS| Show the file whose id is passed:
@@ -2136,13 +2735,13 @@ The fully expanded example above (without environment variables) looks like this
   export SERVER_URL=https://demo.dataverse.org
   export ID=408730
 
-  curl $SERVER_URL/api/file/$ID
+  curl "$SERVER_URL/api/file/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/files/408730
+  curl "https://demo.dataverse.org/api/files/408730"
 
 You may get the draft version of a published file if you pass an API token with view draft permissions and use the draft path parameter:
 
@@ -2152,13 +2751,13 @@ You may get its draft version of an published file if you pass an api token with
   export PERSISTENT_IDENTIFIER=doi:10.5072/FK2/J8SJZB
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER/api/files/:persistentId/draft/?persistentId=$PERSISTENT_IDENTIFIER
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER/api/files/:persistentId/draft/?persistentId=$PERSISTENT_IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/:persistentId/draft/?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/draft/?persistentId=doi:10.5072/FK2/J8SJZB"
 
 The file id can be extracted from the response retrieved from the API which uses the persistent identifier (``/api/datasets/:persistentId/?persistentId=$PERSISTENT_IDENTIFIER``).
 
@@ -2209,13 +2808,13 @@ A curl example using an ``id``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true $SERVER_URL/api/files/$ID/restrict
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true "$SERVER_URL/api/files/$ID/restrict"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true https://demo.dataverse.org/api/files/24/restrict
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT -d true "https://demo.dataverse.org/api/files/24/restrict"
 
 A curl example using a ``pid``
 
@@ -2225,7 +2824,7 @@ A curl example using a ``pid``
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true $SERVER_URL/api/files/:persistentId/restrict?persistentId=$PERSISTENT_ID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT -d true "$SERVER_URL/api/files/:persistentId/restrict?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
@@ -2246,13 +2845,13 @@ A curl example using an ``ID``:
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/uningest
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/uningest"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/uningest
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/uningest"
 
 A curl example using a ``PERSISTENT_ID``:
 
@@ -2268,14 +2867,263 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/uningest?persistentId=doi:10.5072/FK2/AAA000"
-
-Reingest a File
-~~~~~~~~~~~~~~~
-
-Attempt to ingest an existing datafile as tabular data. This API can be used on a file that was not ingested as tabular back when it was uploaded. For example, a Stata v.14 file that was uploaded before ingest support for Stata 14 was added (in Dataverse Software v.4.9). It can also be used on a file that failed to ingest due to a bug in the ingest plugin that has since been fixed (hence the name "reingest").
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/uningest?persistentId=doi:10.5072/FK2/AAA000"
+
+Reingest a File
+~~~~~~~~~~~~~~~
+
+Attempt to ingest an existing datafile as tabular data. This API can be used on a file that was not ingested as tabular back when it was uploaded. For example, a Stata v.14 file that was uploaded before ingest support for Stata 14 was added (in Dataverse Software v.4.9). It can also be used on a file that failed to ingest due to a bug in the ingest plugin that has since been fixed (hence the name "reingest").
+
+Note that this requires "superuser" credentials.
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/reingest"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/reingest"
+
+A curl example using a ``PERSISTENT_ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/reingest?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/reingest?persistentId=doi:10.5072/FK2/AAA000"
+
+Note: at present, the API cannot be used on a file that's already successfully ingested as tabular.
+
+.. _redetect-file-type:
+
+Redetect File Type
+~~~~~~~~~~~~~~~~~~
+
+The Dataverse Software uses a variety of methods for determining file types (MIME types or content types) and these methods (listed below) are updated periodically. If you have files that have an unknown file type, you can have the Dataverse Software attempt to redetect the file type.
+
+When using the curl command below, you can pass ``dryRun=true`` if you don't want any changes to be saved to the database. Change this to ``dryRun=false`` (or omit it) to save the change.
+
+A curl example using an ``id``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/redetect?dryRun=true"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/redetect?dryRun=true"
+
+A curl example using a ``pid``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/redetect?persistentId=$PERSISTENT_ID&dryRun=true"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/redetect?persistentId=doi:10.5072/FK2/AAA000&dryRun=true"
+
+Currently the following methods are used to detect file types:
+
+- The file type detected by the browser (or sent via API).
+- JHOVE: https://jhove.openpreservation.org
+- The file extension (e.g. ".ipynb") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``.
+- The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``.
+
+.. _extractNcml:
+
+Extract NcML
+~~~~~~~~~~~~
+
+As explained in the :ref:`netcdf-and-hdf5` section of the User Guide, when those file types are uploaded, an attempt is made to extract an NcML file from them and store it as an auxiliary file.
+
+This happens automatically but superusers can also manually trigger this NcML extraction process with the API endpoint below.
+
+Note that "true" will be returned if an NcML file was created. "false" will be returned if there was an error or if the NcML file already exists (check server.log for details).
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/extractNcml"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/extractNcml"
+
+A curl example using a PID:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/extractNcml?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/extractNcml?persistentId=doi:10.5072/FK2/AAA000"
+
+Replacing Files
+~~~~~~~~~~~~~~~
+
+Replace an existing file where ``ID`` is the database id of the file to replace or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires the ``file`` to be passed as well as ``jsonData`` containing a JSON string with the new metadata. Note that metadata such as description, directoryLabel (File Path) and tags are not carried over from the file being replaced.
+
+Note that when a Dataverse installation is configured to use S3 storage with direct upload enabled, there is API support to send a replacement file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide.
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' "$SERVER_URL/api/files/$ID/replace"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \
+    -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \
+    "https://demo.dataverse.org/api/files/24/replace"
+
+A curl example using a ``PERSISTENT_ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' \
+    "$SERVER_URL/api/files/:persistentId/replace?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \
+    -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \
+    "https://demo.dataverse.org/api/files/:persistentId/replace?persistentId=doi:10.5072/FK2/AAA000"
+
+Deleting Files
+~~~~~~~~~~~~~~
+
+Delete an existing file where ``ID`` is the database id of the file to delete or ``PERSISTENT_ID`` is the persistent id (DOI or Handle, if it exists) of the file.
+
+Note that the behavior of deleting files depends on whether the dataset has ever been published.
+
+- If the dataset has never been published, the file will be deleted forever.
+- If the dataset has been published, the file is deleted from the draft (and future published versions).
+- If the dataset has been published, the deleted file can still be downloaded because it was part of a published version.
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/files/$ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/files/24"
+
+A curl example using a ``PERSISTENT_ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/files/:persistentId?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/files/:persistentId?persistentId=doi:10.5072/FK2/AAA000"
+
+Getting File Metadata
+~~~~~~~~~~~~~~~~~~~~~
+
+Provides a JSON representation of the file metadata for an existing file, where ``ID`` is the database id of the file to get metadata from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file.
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ID=24
+
+  curl "$SERVER_URL/api/files/$ID/metadata"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/files/24/metadata"
+
+A curl example using a ``PERSISTENT_ID``
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+
+  curl "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000"
 
-Note that this requires "superuser" credentials.
+The current draft can also be viewed if you have the necessary permissions and pass your API token.
 
 A curl example using an ``ID``
 
@@ -2285,13 +3133,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/reingest
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/metadata/draft"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/reingest
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/24/metadata/draft"
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2301,26 +3149,22 @@ A curl example using a ``PERSISTENT_ID``
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/:persistentId/reingest?persistentId=$PERSISTENT_ID
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/metadata/draft?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/reingest?persistentId=doi:10.5072/FK2/AAA000"
-
-Note: at present, the API cannot be used on a file that's already successfully ingested as tabular.
-
-.. _redetect-file-type:
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/metadata/draft?persistentId=doi:10.5072/FK2/AAA000"
 
-Redetect File Type
-~~~~~~~~~~~~~~~~~~
+Note: The ``id`` returned in the JSON response is the id of the file metadata version.
 
-The Dataverse Software uses a variety of methods for determining file types (MIME types or content types) and these methods (listed below) are updated periodically. If you have files that have an unknown file type, you can have the Dataverse Software attempt to redetect the file type.
+Getting File Data Tables
+~~~~~~~~~~~~~~~~~~~~~~~~
 
-When using the curl command below, you can pass ``dryRun=true`` if you don't want any changes to be saved to the database. Change this to ``dryRun=false`` (or omit it) to save the change.
+This endpoint is oriented toward tabular files and provides a JSON representation of the file data tables for an existing tabular file. ``ID`` is the database id of the file to get the data tables from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file.
 
-A curl example using an ``id``
+A curl example using an ``ID``
 
 .. code-block:: bash
 
@@ -2328,15 +3172,15 @@ A curl example using an ``id``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/redetect?dryRun=true"
+  curl "$SERVER_URL/api/files/$ID/dataTables"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/redetect?dryRun=true"
+  curl "https://demo.dataverse.org/api/files/24/dataTables"
 
-A curl example using a ``pid``
+A curl example using a ``PERSISTENT_ID``
 
 .. code-block:: bash
 
@@ -2344,31 +3188,24 @@ A curl example using a ``pid``
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/redetect?persistentId=$PERSISTENT_ID&dryRun=true"
+  curl "$SERVER_URL/api/files/:persistentId/dataTables?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/redetect?persistentId=doi:10.5072/FK2/AAA000&dryRun=true"
-
-Currently the following methods are used to detect file types:
-
-- The file type detected by the browser (or sent via API).
-- JHOVE: http://jhove.openpreservation.org
-- The file extension (e.g. ".ipybn") is used, defined in a file called ``MimeTypeDetectionByFileExtension.properties``.
-- The file name (e.g. "Dockerfile") is used, defined in a file called ``MimeTypeDetectionByFileName.properties``.
+  curl "https://demo.dataverse.org/api/files/:persistentId/dataTables?persistentId=doi:10.5072/FK2/AAA000"
 
-.. _extractNcml:
+Note that if the requested file is not tabular, the endpoint will return an error.
 
-Extract NcML
-~~~~~~~~~~~~
+.. _file-download-count:
 
-As explained in the :ref:`netcdf-and-hdf5` section of the User Guide, when those file types are uploaded, an attempt is made to extract an NcML file from them and store it as an auxiliary file.
+Getting File Download Count
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-This happens automatically but superusers can also manually trigger this NcML extraction process with the API endpoint below.
+Provides the download count for a particular file, where ``ID`` is the database id of the file to get the download count from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file.
 
-Note that "true" will be returned if an NcML file was created. "false" will be returned if there was an error or if the NcML file already exists (check server.log for details).
+A curl example using an ``ID``
 
 .. code-block:: bash
 
@@ -2376,15 +3213,15 @@ Note that "true" will be returned if an NcML file was created. "false" will be r
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/extractNcml"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/downloadCount"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/extractNcml
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/downloadCount"
 
-A curl example using a PID:
+A curl example using a ``PERSISTENT_ID``
 
 .. code-block:: bash
 
@@ -2392,20 +3229,20 @@ A curl example using a PID:
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/:persistentId/extractNcml?persistentId=$PERSISTENT_ID"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/:persistentId/extractNcml?persistentId=doi:10.5072/FK2/AAA000"
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000"
 
-Replacing Files
-~~~~~~~~~~~~~~~
+If you are interested in download counts for multiple files, see :doc:`/api/metrics`.
 
-Replace an existing file where ``ID`` is the database id of the file to replace or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires the ``file`` to be passed as well as a ``jsonString`` expressing the new metadata.  Note that metadata such as description, directoryLabel (File Path) and tags are not carried over from the file being replaced.
+File Has Been Deleted
+~~~~~~~~~~~~~~~~~~~~~
 
-Note that when a Dataverse installation is configured to use S3 storage with direct upload enabled, there is API support to send a replacement file directly to S3. This is more complex and is described in the :doc:`/developers/s3-direct-upload-api` guide.
+Find out whether a particular file that existed in a previous version of the dataset no longer exists in the latest version.
 
 A curl example using an ``ID``
 
@@ -2415,15 +3252,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' $SERVER_URL/api/files/$ID/replace
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/hasBeenDeleted"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \
-    -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \
-    https://demo.dataverse.org/api/files/24/replace
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/hasBeenDeleted"
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2433,21 +3268,18 @@ A curl example using a ``PERSISTENT_ID``
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -F 'file=@file.extension' -F 'jsonData={json}' \
-    "$SERVER_URL/api/files/:persistentId/replace?persistentId=$PERSISTENT_ID"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -F 'file=@data.tsv' \
-    -F 'jsonData={"description":"My description.","categories":["Data"],"forceReplace":false}' \
-    "https://demo.dataverse.org/api/files/:persistentId/replace?persistentId=doi:10.5072/FK2/AAA000"
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET  "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000"
 
-Getting File Metadata
-~~~~~~~~~~~~~~~~~~~~~
+Updating File Metadata
+~~~~~~~~~~~~~~~~~~~~~~
 
-Provides a json representation of the file metadata for an existing file where ``ID`` is the database id of the file to get metadata from or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file.
+Updates the file metadata for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the new metadata. No metadata from the previous version of this file will be persisted, so if you only want to update a specific field, first get the JSON with the command above and then alter just the fields you want to change.
 
 A curl example using an ``ID``
 
@@ -2457,13 +3289,17 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl $SERVER_URL/api/files/$ID/metadata
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
+    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \
+    "$SERVER_URL/api/files/$ID/metadata"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/files/24/metadata
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
+    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \
+    "https://demo.dataverse.org/api/files/24/metadata"
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2473,15 +3309,39 @@ A curl example using a ``PERSISTENT_ID``
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
 
-  curl "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
+    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \
+    "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000"
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
+    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \
+    "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000"
 
-The current draft can also be viewed if you have permissions and pass your API token
+Note: To update the ``tabularTags`` property of file metadata, use the ``dataFileTags`` key in the API request.
+
+Also note that ``dataFileTags`` are not versioned; changes to them will update the published version of the file.
+
+.. _EditingVariableMetadata:
+
+Updating File Metadata Categories
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Updates the categories for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the category names.
+
+Although categories can also be updated with the previous endpoint, this endpoint is more practical when you only need to update categories and not other metadata fields.
+
+The JSON representation of file categories (``categories.json``) looks like this::
+
+  {
+    "categories": [
+      "Data",
+      "Custom"
+    ]
+  }
 
 A curl example using an ``ID``
 
@@ -2490,14 +3350,19 @@ A curl example using an ``ID``
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
+  export FILE_PATH=categories.json
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/metadata/draft
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
+    "$SERVER_URL/api/files/$ID/metadata/categories" \
+    -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/metadata/draft
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
+    "http://demo.dataverse.org/api/files/24/metadata/categories" \
+    -H "Content-type:application/json" --upload-file categories.json
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2506,22 +3371,35 @@ A curl example using a ``PERSISTENT_ID``
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+  export FILE_PATH=categories.json
 
-  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/:persistentId/metadata/draft?persistentId=$PERSISTENT_ID"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
+    "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" \
+    -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/:persistentId/metadata/draft?persistentId=doi:10.5072/FK2/AAA000"
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
+    "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" \
+    -H "Content-type:application/json" --upload-file categories.json
 
-Note: The ``id`` returned in the json response is the id of the file metadata version.
+Note that if the specified categories do not exist, they will be created.
 
+Updating File Tabular Tags
+~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Updating File Metadata
-~~~~~~~~~~~~~~~~~~~~~~
+Updates the tabular tags for an existing tabular file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the tabular tag names.
 
-Updates the file metadata for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the new metadata. No metadata from the previous version of this file will be persisted, so if you want to update a specific field first get the json with the above command and alter the fields you want.
+The JSON representation of tabular tags (``tags.json``) looks like this::
+
+  {
+    "tabularTags": [
+      "Survey",
+      "Genomics"
+    ]
+  }
 
 A curl example using an ``ID``
 
@@ -2530,18 +3408,19 @@ A curl example using an ``ID``
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
+  export FILE_PATH=tags.json
 
   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
-    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \
-    $SERVER_URL/api/files/$ID/metadata
+    "$SERVER_URL/api/files/$ID/metadata/tabularTags" \
+    -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
-    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \
-    http://demo.dataverse.org/api/files/24/metadata
+    "http://demo.dataverse.org/api/files/24/metadata/tabularTags" \
+    -H "Content-type:application/json" --upload-file tags.json
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2550,22 +3429,29 @@ A curl example using a ``PERSISTENT_ID``
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/AAA000
+  export FILE_PATH=tags.json
 
   curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
-    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \
-    "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID"
+    "$SERVER_URL/api/files/:persistentId/metadata/tabularTags?persistentId=$PERSISTENT_ID" \
+    -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \
-    -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \
-    "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000"
+    "https://demo.dataverse.org/api/files/:persistentId/metadata/tabularTags?persistentId=doi:10.5072/FK2/AAA000" \
+    -H "Content-type:application/json" --upload-file tags.json
 
-Also note that dataFileTags are not versioned and changes to these will update the published version of the file.
+Note that the specified tabular tags must be valid. The supported tags are:
 
-.. _EditingVariableMetadata:
+* ``Survey``
+* ``Time Series``
+* ``Panel``
+* ``Event``
+* ``Genomics``
+* ``Network``
+* ``Geospatial``
 
 Editing Variable Level Metadata
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2581,13 +3467,13 @@ A curl example using an ``ID``
   export ID=24
   export FILE=dct.xml
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT $SERVER_URL/api/edit/$ID --upload-file $FILE
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/edit/$ID" --upload-file $FILE
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT https://demo.dataverse.org/api/edit/24 --upload-file dct.xml
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/edit/24" --upload-file dct.xml
 
 You can download :download:`dct.xml <../../../../src/test/resources/xml/dct.xml>` from the example above to see what the XML looks like.
 
@@ -2605,13 +3491,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/prov-json
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/prov-json"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/prov-json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/24/prov-json"
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2640,13 +3526,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/files/$ID/prov-freeform
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/$ID/prov-freeform"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/files/24/prov-freeform
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/files/24/prov-freeform"
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2677,7 +3563,7 @@ A curl example using an ``ID``
   export ENTITY_NAME="..."
   export FILE_PATH=provenance.json
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/prov-json?entityName=$ENTITY_NAME -H "Content-type:application/json" --upload-file $FILE_PATH
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/prov-json?entityName=$ENTITY_NAME" -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
@@ -2717,13 +3603,13 @@ A curl example using an ``ID``
   export ID=24
   export FILE_PATH=provenance.json
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/files/$ID/prov-freeform -H "Content-type:application/json" --upload-file $FILE_PATH
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/files/$ID/prov-freeform" -H "Content-type:application/json" --upload-file $FILE_PATH
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST https://demo.dataverse.org/api/files/24/prov-freeform -H "Content-type:application/json" --upload-file provenance.json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/files/24/prov-freeform" -H "Content-type:application/json" --upload-file provenance.json
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2755,13 +3641,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/files/$ID/prov-json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/files/$ID/prov-json"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/files/24/prov-json
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/files/24/prov-json"
 
 A curl example using a ``PERSISTENT_ID``
 
@@ -2790,7 +3676,7 @@ Starting with the release 4.10 the size of the saved original file (for an inges
 
   export SERVER_URL=https://localhost
 
-  curl $SERVER_URL/api/admin/datafiles/integrity/fixmissingoriginalsizes
+  curl "$SERVER_URL/api/admin/datafiles/integrity/fixmissingoriginalsizes"
 
 with limit parameter:
 
@@ -2805,13 +3691,13 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes"
+  curl "https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes"
 
 with limit parameter:
 
 .. code-block:: bash
 
-  curl https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes?limit=10"
+  curl "https://localhost/api/admin/datafiles/integrity/fixmissingoriginalsizes?limit=10"
 
 Note the optional "limit" parameter. Without it, the API will attempt to populate the sizes for all the saved originals that don't have them in the database yet. Otherwise it will do so for the first N such datafiles. 
 
@@ -2833,7 +3719,23 @@ The response is a JSON object described in the :doc:`/api/external-tools` sectio
   export FILEMETADATA_ID=1
   export TOOL_ID=1
 
-  curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/files/$FILE_ID/metadata/$FILEMETADATA_ID/toolparams/$TOOL_ID
+  curl -H "X-Dataverse-key: $API_TOKEN" -H "Accept:application/json" "$SERVER_URL/api/files/$FILE_ID/metadata/$FILEMETADATA_ID/toolparams/$TOOL_ID"
+
+.. _get-fixity-algorithm:
+
+Get Fixity Algorithm
+~~~~~~~~~~~~~~~~~~~~
+
+This API call can be used to discover the configured fixity/checksum algorithm being used by a Dataverse installation (as configured by :ref:`:FileFixityChecksumAlgorithm`).
+Currently, the possible values are MD5, SHA-1, SHA-256, and SHA-512.
+This algorithm will be used when the Dataverse software manages a file upload and should be used by external clients uploading files to a Dataverse instance. (Existing files may or may not have checksums computed with this algorithm.)
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl "$SERVER_URL/api/files/fixityAlgorithm"
+
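+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/files/fixityAlgorithm"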
 
 Users Token Management
 ----------------------
@@ -2845,21 +3747,21 @@ Find a Token's Expiration Date
 
 In order to obtain the expiration date of a token use::
 
-	curl -H X-Dataverse-key:$API_TOKEN -X GET $SERVER_URL/api/users/token
+	curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/users/token"
 
 Recreate a Token
 ~~~~~~~~~~~~~~~~
 
 In order to obtain a new token use::
 
-	curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/users/token/recreate
+	curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/users/token/recreate"
 
 Delete a Token
 ~~~~~~~~~~~~~~
 
 In order to delete a token use::
 
-	curl -H X-Dataverse-key:$API_TOKEN -X DELETE $SERVER_URL/api/users/token
+	curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/users/token"
 	
 	
 
@@ -2886,26 +3788,14 @@ Optionally, you may use a third query parameter "sendEmailNotification=false" to
 Roles
 -----
 
-Create a New Role in a Dataverse Collection
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Creates a new role under Dataverse collection ``id``. Needs a json file with the role description:
-
-.. code-block:: bash
-
-  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-  export SERVER_URL=https://demo.dataverse.org
-  export ID=root
-
-  curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-type:application/json" $SERVER_URL/api/dataverses/$ID/roles --upload-file roles.json
-
-The fully expanded example above (without environment variables) looks like this:
+A role is a set of permissions.
 
-.. code-block:: bash
+.. _json-representation-of-a-role:
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -H "Content-type:application/json" https://demo.dataverse.org/api/dataverses/root/roles --upload-file roles.json
+JSON Representation of a Role
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Where ``roles.json`` looks like this::
+The JSON representation of a role (``roles.json``) looks like this::
 
   {
     "alias": "sys1",
@@ -2916,8 +3806,12 @@ Where ``roles.json`` looks like this::
     ]
   } 
 
-.. note:: Only a Dataverse installation account with superuser permissions is allowed to create roles in a Dataverse Collection.
+.. note:: ``alias`` is constrained to a length of 16 characters.
+
+Create Role
+~~~~~~~~~~~
 
+Roles can be created globally (:ref:`create-global-role`) or for individual Dataverse collections (:ref:`create-role-in-collection`).
 
 Show Role
 ~~~~~~~~~
@@ -2937,13 +3831,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/roles/$ID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/24
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/24"
 
 A curl example using a Role alias ``ALIAS``
 
@@ -2959,7 +3853,7 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/roles/:alias?alias=roleAlias
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=roleAlias"
 
 
 Explicit Groups
@@ -3052,13 +3946,13 @@ Show Dataverse Software Version and Build Number
 
   export SERVER_URL=https://demo.dataverse.org
 
-  curl $SERVER_URL/api/info/version
+  curl "$SERVER_URL/api/info/version"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/info/version
+  curl "https://demo.dataverse.org/api/info/version"
 
 Show Dataverse Installation Server Name
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3071,13 +3965,13 @@ Get the server name. This is useful when a Dataverse installation is composed of
 
   export SERVER_URL=https://demo.dataverse.org
 
-  curl $SERVER_URL/api/info/server
+  curl "$SERVER_URL/api/info/server"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/info/server
+  curl "https://demo.dataverse.org/api/info/server"
 
 Show Custom Popup Text for Publishing Datasets
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3090,13 +3984,13 @@ For now, only the value for the :ref:`:DatasetPublishPopupCustomText` setting fr
 
   export SERVER_URL=https://demo.dataverse.org
 
-  curl $SERVER_URL/api/info/settings/:DatasetPublishPopupCustomText
+  curl "$SERVER_URL/api/info/settings/:DatasetPublishPopupCustomText"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/info/settings/:DatasetPublishPopupCustomText
+  curl "https://demo.dataverse.org/api/info/settings/:DatasetPublishPopupCustomText"
 
 Get API Terms of Use URL
 ~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3109,13 +4003,75 @@ Get API Terms of Use. The response contains the text value inserted as API Terms
 
   export SERVER_URL=https://demo.dataverse.org
 
-  curl $SERVER_URL/api/info/apiTermsOfUse
+  curl "$SERVER_URL/api/info/apiTermsOfUse"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/info/apiTermsOfUse"
+
+.. _info-incomplete-metadata:
+
+Show Support Of Incomplete Metadata Deposition
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Learn if an instance has been configured to allow deposition of incomplete datasets via the API.
+See also :ref:`create-dataset-command` and :ref:`dataverse.api.allow-incomplete-metadata`.
+
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl "$SERVER_URL/api/info/settings/incompleteMetadataViaApi"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/info/settings/incompleteMetadataViaApi"
+
+Get Zip File Download Limit
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Get the configured zip file download limit. The response contains the long value of the limit in bytes.
+
+This limit comes from the database setting :ref:`:ZipDownloadLimit` if it is set; otherwise the default value of 104857600 bytes (100 MB) is used.
+
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl "$SERVER_URL/api/info/zipDownloadLimit"
+
+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl "https://demo.dataverse.org/api/info/zipDownloadLimit"
+
+Get Maximum Embargo Duration In Months
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Get the maximum embargo duration in months, if available, configured through the database setting :ref:`:MaxEmbargoDurationInMonths` from the Configuration section of the Installation Guide.
+
+.. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl "$SERVER_URL/api/info/settings/:MaxEmbargoDurationInMonths"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/info/apiTermsOfUse
+  curl "https://demo.dataverse.org/api/info/settings/:MaxEmbargoDurationInMonths"
 
 .. _metadata-blocks-api:
 
@@ -3133,13 +4089,13 @@ Show Info About All Metadata Blocks
 
   export SERVER_URL=https://demo.dataverse.org
 
-  curl $SERVER_URL/api/metadatablocks
+  curl "$SERVER_URL/api/metadatablocks"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/metadatablocks
+  curl "https://demo.dataverse.org/api/metadatablocks"
 
 Show Info About Single Metadata Block
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3151,13 +4107,13 @@ Show Info About Single Metadata Block
   export SERVER_URL=https://demo.dataverse.org
   export IDENTIFIER=citation
 
-  curl $SERVER_URL/api/metadatablocks/$IDENTIFIER
+  curl "$SERVER_URL/api/metadatablocks/$IDENTIFIER"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl https://demo.dataverse.org/api/metadatablocks/citation
+  curl "https://demo.dataverse.org/api/metadatablocks/citation"
 
 .. _Notifications:
 
@@ -3173,7 +4129,7 @@ Each user can get a dump of their notifications by passing in their API token:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/notifications/all
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/notifications/all"
 
 Delete Notification by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3184,7 +4140,7 @@ Each user can delete notifications by passing in their API token and specifying
 
   export NOTIFICATION_ID=555
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/notifications/$NOTIFICATION_ID"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/notifications/$NOTIFICATION_ID"
 
 Get All Muted In-app Notifications by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3193,7 +4149,7 @@ Each user can get a list of their muted in-app notification types by passing in
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:$API_TOKEN -X GET "$SERVER_URL/api/notifications/mutedNotifications"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/notifications/mutedNotifications"
 
 Mute In-app Notification by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3204,7 +4160,7 @@ Each user can mute in-app notifications by passing in their API token and specif
 
   export NOTIFICATION_TYPE=ASSIGNROLE
 
-  curl -H X-Dataverse-key:$API_TOKEN -X PUT "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE"
 
 Unmute In-app Notification by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3215,7 +4171,7 @@ Each user can unmute in-app notifications by passing in their API token and spec
 
   export NOTIFICATION_TYPE=ASSIGNROLE
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/notifications/mutedNotifications/$NOTIFICATION_TYPE"
 
 Get All Muted Email Notifications by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3224,7 +4180,7 @@ Each user can get a list of their muted email notification types by passing in t
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:$API_TOKEN -X GET "$SERVER_URL/api/notifications/mutedEmails"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/notifications/mutedEmails"
 
 Mute Email Notification by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3235,7 +4191,7 @@ Each user can mute email notifications by passing in their API token and specify
 
   export NOTIFICATION_TYPE=ASSIGNROLE
 
-  curl -H X-Dataverse-key:$API_TOKEN -X PUT "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE"
 
 Unmute Email Notification by User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3246,7 +4202,7 @@ Each user can unmute email notifications by passing in their API token and speci
 
   export NOTIFICATION_TYPE=ASSIGNROLE
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/notifications/mutedEmails/$NOTIFICATION_TYPE"
 
 .. _User Information:
 
@@ -3258,9 +4214,9 @@ Get User Information in JSON Format
 
 Each user can get a dump of their basic information in JSON format by passing in their API token::
 
-    curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/users/:me    
+    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/users/:me"
+
 
-.. _pids-api:
 
 Managing Harvesting Server and Sets
 -----------------------------------
@@ -3305,7 +4261,7 @@ An example JSON file would look like this::
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=https://demo.dataverse.org
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST "$SERVER_URL/api/harvest/server/oaisets/add" --upload-file harvestset-finch.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/harvest/server/oaisets/add" --upload-file harvestset-finch.json
 
 The fully expanded example above (without the environment variables) looks like this:
 
@@ -3340,7 +4296,7 @@ An example JSON file would look like this::
   export SERVER_URL=https://demo.dataverse.org
   export SPECNAME=ffAuthor
 
-  curl -H X-Dataverse-key:$API_TOKEN -X PUT "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME" --upload-file modify-harvestset-finch.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME" --upload-file modify-harvestset-finch.json
 
 The fully expanded example above (without the environment variables) looks like this:
 
@@ -3361,7 +4317,7 @@ To delete a harvesting set, use the set's database name. For example, to delete
   export SERVER_URL=https://demo.dataverse.org
   export SPECNAME=ffAuthor
 
-  curl -H X-Dataverse-key:$API_TOKEN -X DELETE "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME"
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/harvest/server/oaisets/$SPECNAME"
 
 The fully expanded example above (without the environment variables) looks like this:
 
@@ -3371,6 +4327,9 @@ The fully expanded example above (without the environment variables) looks like
 
 Only users with superuser permissions may delete harvesting sets.
 
+
+.. _managing-harvesting-clients-api:
+
 Managing Harvesting Clients
 ---------------------------
 
@@ -3458,6 +4417,9 @@ An example JSON file would look like this::
     "set": "user-lmops"
   }
 
+Something important to keep in mind about this API is that, unlike the harvesting clients GUI, it will create a client with the values supplied without attempting to validate them in real time. In other words, for the ``harvestUrl`` it will accept anything that looks like a well-formed URL, without making any OAI calls to verify that the set name and/or metadata format entered are actually supported by that server. This is by design, to give an admin the option to create a client in the rare case when it cannot be done via the GUI because of real-time failures in an exchange with an otherwise valid OAI server. It does, however, put the responsibility on the admin to supply values that have already been confirmed to be valid.
+
+
 .. note:: See :ref:`curl-examples-and-environment-variables` if you are unfamiliar with the use of export below.
 
 .. code-block:: bash
@@ -3465,7 +4427,7 @@ An example JSON file would look like this::
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=http://localhost:8080
 
-  curl -H X-Dataverse-key:$API_TOKEN -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/zenodo" --upload-file client.json
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-Type: application/json" "$SERVER_URL/api/harvest/clients/zenodo" --upload-file client.json
 
 The fully expanded example above (without the environment variables) looks like this:
 
@@ -3514,6 +4476,9 @@ Self-explanatory:
 Only users with superuser permissions may delete harvesting clients.
 
 
+
+.. _pids-api:
+
 PIDs
 ----
 
@@ -3532,13 +4497,13 @@ Get information on a PID, especially its "state" such as "draft" or "findable".
   export SERVER_URL=https://demo.dataverse.org
   export PID=doi:10.70122/FK2/9BXT5O
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/pids?persistentId=$PID
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/pids?persistentId=$PID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/pids?persistentId=doi:10.70122/FK2/9BXT5O
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/pids?persistentId=doi:10.70122/FK2/9BXT5O"
 
 List Unreserved PIDs
 ~~~~~~~~~~~~~~~~~~~~
@@ -3552,14 +4517,14 @@ Get a list of PIDs that have not been reserved on the PID provider side. This ca
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export SERVER_URL=https://demo.dataverse.org
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/pids/unreserved
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/pids/unreserved"
 
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx https://demo.dataverse.org/api/pids/unreserved
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/pids/unreserved"
 
 Reserve a PID
 ~~~~~~~~~~~~~
@@ -3574,13 +4539,13 @@ Reserved a PID for a dataset. A superuser API token is required.
   export SERVER_URL=https://demo.dataverse.org
   export PID=doi:10.70122/FK2/9BXT5O
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/pids/:persistentId/reserve?persistentId=$PID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/pids/:persistentId/reserve?persistentId=$PID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST https://demo.dataverse.org/api/pids/:persistentId/reserve?persistentId=doi:10.70122/FK2/9BXT5O
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/pids/:persistentId/reserve?persistentId=doi:10.70122/FK2/9BXT5O"
 
 Delete a PID
 ~~~~~~~~~~~~
@@ -3595,13 +4560,13 @@ Delete PID (this is only possible for PIDs that are in the "draft" state) and wi
   export SERVER_URL=https://demo.dataverse.org
   export PID=doi:10.70122/FK2/9BXT5O
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/pids/:persistentId/delete?persistentId=$PID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/pids/:persistentId/delete?persistentId=$PID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X DELETE https://demo.dataverse.org/api/pids/:persistentId/delete?persistentId=doi:10.70122/FK2/9BXT5O
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/pids/:persistentId/delete?persistentId=doi:10.70122/FK2/9BXT5O"
 
 
 .. _admin:
@@ -3651,7 +4616,7 @@ Note that HTML can be included in banner messages.
 
 Add a Banner Message::
 
-  curl -H "Content-type:application/json" -X POST http://$SERVER/api/admin/bannerMessage --upload-file messages.json
+  curl -H "Content-type:application/json" -X POST "http://$SERVER/api/admin/bannerMessage" --upload-file messages.json
   
 Where ``messages.json`` looks like this::
 
@@ -3671,15 +4636,15 @@ Where ``messages.json`` looks like this::
      
 Get a list of active Banner Messages::
 
-  curl  -X GET http://$SERVER/api/admin/bannerMessage
+  curl  -X GET "http://$SERVER/api/admin/bannerMessage"
   
 Delete a Banner Message by its id::
 
-  curl  -X DELETE http://$SERVER/api/admin/bannerMessage/$id   
+  curl  -X DELETE "http://$SERVER/api/admin/bannerMessage/$id"
   
 Deactivate a Banner Message by its id (allows you to hide a message while retaining information about which users have dismissed the banner)::
 
-  curl  -X PUT http://$SERVER/api/admin/bannerMessage/$id/deactivate    
+  curl  -X PUT "http://$SERVER/api/admin/bannerMessage/$id/deactivate"
 
 List Authentication Provider Factories
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Check whether an authentication provider is enabled::
 
 The body of the request should be either ``true`` or ``false``. Content type has to be ``application/json``, like so::
 
-  curl -H "Content-type: application/json"  -X POST -d"false" http://localhost:8080/api/admin/authenticationProviders/echo-dignified/:enabled
+  curl -H "Content-type: application/json"  -X POST -d"false" "http://localhost:8080/api/admin/authenticationProviders/echo-dignified/:enabled"
 
 Delete an Authentication Provider
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -3748,13 +4713,25 @@ List all global roles in the system. ::
 
     GET http://$SERVER/api/admin/roles
 
+.. _create-global-role:
+
 Create Global Role
 ~~~~~~~~~~~~~~~~~~
 
 Creates a global role in the Dataverse installation. The data POSTed are assumed to be a role JSON. ::
 
     POST http://$SERVER/api/admin/roles
-    
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json
+
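+The fully expanded example above (without environment variables) looks like this:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/admin/roles" --upload-file roles.json
+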
+For the content of ``roles.json``, see :ref:`json-representation-of-a-role`.
+
 Delete Global Role
 ~~~~~~~~~~~~~~~~~~
 
@@ -3766,13 +4743,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE $SERVER_URL/api/admin/roles/$ID
+  curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/24
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/admin/roles/24"
 
 A curl example using a Role alias ``ALIAS``
 
@@ -3788,7 +4765,7 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE https://demo.dataverse.org/api/admin/roles/:alias?alias=roleAlias    
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/admin/roles/:alias?alias=roleAlias"
 
 List Users
 ~~~~~~~~~~
@@ -3806,7 +4783,7 @@ List users with the options to search and "page" through results. Only accessibl
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -H "X-Dataverse-key:$API_TOKEN" $SERVER_URL/api/admin/list-users
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/list-users"
 
   # sort by createdtime (the creation time of the account)
   curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/admin/list-users?sortKey=createdtime"
@@ -3815,7 +4792,7 @@ The fully expanded example above (without environment variables) looks like this
 
 .. code-block:: bash
 
-  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" https://demo.dataverse.org/api/admin/list-users
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/admin/list-users"
 
   # sort by createdtime (the creation time of the account)
   curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/admin/list-users?sortKey=createdtime"
@@ -3966,7 +4943,7 @@ If a user has created multiple accounts and has been performed actions under bot
 
     POST https://$SERVER/api/users/$toMergeIdentifier/mergeIntoUser/$continuingIdentifier
 
-Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST http://demo.dataverse.org/api/users/jsmith2/mergeIntoUser/jsmith``
+Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST "http://demo.dataverse.org/api/users/jsmith2/mergeIntoUser/jsmith"``
 
 This action moves account data from jsmith2 into the account jsmith and deletes the account of jsmith2.
 
@@ -3981,7 +4958,7 @@ Changes identifier for user in ``AuthenticatedUser``, ``BuiltinUser``, ``Authent
 
     POST http://$SERVER/api/users/$oldIdentifier/changeIdentifier/$newIdentifier
 
-Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST  https://demo.dataverse.org/api/users/johnsmith/changeIdentifier/jsmith``
+Example: ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST  "https://demo.dataverse.org/api/users/johnsmith/changeIdentifier/jsmith"``
 
 This action changes the identifier of user johnsmith to jsmith.
 
@@ -4021,13 +4998,13 @@ Deactivates a user. A superuser API token is not required but the command will o
   export SERVER_URL=http://localhost:8080
   export USERNAME=jdoe
 
-  curl -X POST $SERVER_URL/api/admin/authenticatedUsers/$USERNAME/deactivate
+  curl -X POST "$SERVER_URL/api/admin/authenticatedUsers/$USERNAME/deactivate"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -X POST http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate
+  curl -X POST "http://localhost:8080/api/admin/authenticatedUsers/jdoe/deactivate"
 
 The database ID of the user can be passed instead of the username.
 
@@ -4036,7 +5013,7 @@ The database ID of the user can be passed instead of the username.
   export SERVER_URL=http://localhost:8080
   export USERID=42
 
-  curl -X POST $SERVER_URL/api/admin/authenticatedUsers/id/$USERID/deactivate
+  curl -X POST "$SERVER_URL/api/admin/authenticatedUsers/id/$USERID/deactivate"
 
 Note: A primary purpose of most Dataverse installations is to serve an archive. In the archival space, there are best practices around the tracking of data access and the tracking of modifications to data and metadata. In support of these key workflows, a simple mechanism to delete users that have performed edit or access actions in the system is not provided. Providing a Deactivate User endpoint for users who have taken certain actions in the system alongside a Delete User endpoint to remove users that haven't taken certain actions in the system is by design.
 
@@ -4075,13 +5052,13 @@ Show the traces that the user has left in the system, such as datasets created,
   export SERVER_URL=https://demo.dataverse.org
   export USERNAME=jdoe
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X GET $SERVER_URL/api/users/$USERNAME/traces
+  curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/users/$USERNAME/traces"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X GET https://demo.dataverse.org/api/users/jdoe/traces
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/users/jdoe/traces"
 
 Remove All Roles from a User
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -4096,13 +5073,13 @@ Removes all roles from the user. This is equivalent of clicking the "Remove All
   export SERVER_URL=https://demo.dataverse.org
   export USERNAME=jdoe
 
-  curl -H "X-Dataverse-key:$API_TOKEN" -X POST $SERVER_URL/api/users/$USERNAME/removeRoles
+  curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/users/$USERNAME/removeRoles"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST http://localhost:8080/api/users/jdoe/removeRoles
+  curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "http://localhost:8080/api/users/jdoe/removeRoles"
 
 List Role Assignments of a Role Assignee
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -4166,11 +5143,11 @@ Datafile Integrity
 
 Recalculate the checksum value of a datafile, by supplying the file's database id and an algorithm (Valid values for $ALGORITHM include MD5, SHA-1, SHA-256, and SHA-512)::
 
-   curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/admin/computeDataFileHashValue/{fileId}/algorithm/$ALGORITHM
+   curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/computeDataFileHashValue/{fileId}/algorithm/$ALGORITHM"
   
 Validate an existing check sum value against one newly calculated from the saved file:: 
 
-   curl -H X-Dataverse-key:$API_TOKEN -X POST $SERVER_URL/api/admin/validateDataFileHashValue/{fileId}
+   curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/validateDataFileHashValue/{fileId}"
 
 .. _dataset-files-validation-api:
 
@@ -4183,7 +5160,7 @@ The following validates all the physical files in the dataset specified, by reca
 
 It will report the specific files that have failed the validation. For example::
    
-   curl http://localhost:8080/api/admin/validate/dataset/files/:persistentId/?persistentId=doi:10.5072/FK2/XXXXX
+   curl "http://localhost:8080/api/admin/validate/dataset/files/:persistentId/?persistentId=doi:10.5072/FK2/XXXXX"
      {"dataFiles": [
      		  {"datafileId":2658,"storageIdentifier":"file://123-aaa","status":"valid"},
 		  {"datafileId":2659,"storageIdentifier":"file://123-bbb","status":"invalid","errorMessage":"Checksum mismatch for datafile id 2669"}, 
@@ -4193,6 +5170,26 @@ It will report the specific files that have failed the validation. For example::
   
 These are only available to super users.
 
+.. _UpdateChecksums:
+
+Update Checksums To Use New Algorithm
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The fixity algorithm used on existing files can be changed by a superuser using this API call. An optional query parameter (num) can be used to limit the number of updates attempted (i.e. to do processing in batches).
+The API call will only update the algorithm and checksum for a file if the existing checksum can be validated against the file.
+Statistics concerning the updates are returned in the response to the API call with details in the log.
+The primary use for this API call is to update existing files after the algorithm used when uploading new files is changed - see :ref:`:FileFixityChecksumAlgorithm`.
+Allowed values are MD5, SHA-1, SHA-256, and SHA-512.
+
+.. code-block:: bash
+
+  export ALG=SHA-256
+  export BATCHSIZE=1
+
+  curl "http://localhost:8080/api/admin/updateHashValues/$ALG"
+  curl "http://localhost:8080/api/admin/updateHashValues/$ALG?num=$BATCHSIZE"
+
+
 .. _dataset-validation-api:
 
 Dataset Validation
@@ -4200,7 +5197,7 @@ Dataset Validation
 
 Validate the dataset and its components (DatasetVersion, FileMetadatas, etc.) for constraint violations::
 
-  curl $SERVER_URL/api/admin/validate/dataset/{datasetId}
+  curl "$SERVER_URL/api/admin/validate/dataset/{datasetId}"
 
 If validation fails, the API will report the specific database entity and the offending value. For example::
    
@@ -4210,7 +5207,7 @@ If the optional argument ``variables=true`` is specified, the API will also vali
 
 Validate all the datasets in the Dataverse installation, report any constraint violations found::
 
-  curl $SERVER_URL/api/admin/validate/datasets
+  curl "$SERVER_URL/api/admin/validate/datasets"
 
 If the optional argument ``variables=true`` is specified, the API will also validate the metadata associated with any tabular data files. (For example: an invalid or empty variable name). Note that validating all the tabular metadata may significantly increase the run time of the full validation pass. 
 
@@ -4319,14 +5316,14 @@ View the list of standard license terms that can be selected for a dataset:
 .. code-block:: bash
 
   export SERVER_URL=https://demo.dataverse.org
-  curl $SERVER_URL/api/licenses
+  curl "$SERVER_URL/api/licenses"
 
 View the details of the standard license with the database ID specified in ``$ID``:
 
 .. code-block:: bash
 
   export ID=1
-  curl $SERVER_URL/api/licenses/$ID
+  curl "$SERVER_URL/api/licenses/$ID"
 
 
 Superusers can add a new license by posting a JSON file adapted from this example :download:`add-license.json <../_static/api/add-license.json>`. The ``name`` and ``uri`` of the new license must be unique. The sort order field is mandatory. If you are interested in adding a Creative Commons license, you are encouraged to use the JSON files under :ref:`adding-creative-commons-licenses`:
@@ -4334,33 +5331,33 @@ Superusers can add a new license by posting a JSON file adapted from this exampl
 .. code-block:: bash
 
   export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
-  curl -X POST -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN --data-binary @add-license.json $SERVER_URL/api/licenses
+  curl -X POST -H 'Content-Type: application/json' -H "X-Dataverse-key:$API_TOKEN" --data-binary @add-license.json "$SERVER_URL/api/licenses"
 
 Superusers can change whether an existing license is active (usable for new dataset versions) or inactive (only allowed on already-published versions) specified by the license ``$ID``:
 
 .. code-block:: bash
 
   export STATE=true
-  curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:active/$STATE
+  curl -X PUT -H 'Content-Type: application/json' -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/$ID/:active/$STATE"
 
 Superusers may change the default license by specifying the license ``$ID``:
 
 .. code-block:: bash
 
-  curl -X PUT -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/default/$ID
+  curl -X PUT -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/default/$ID"
 
 Superusers can delete a license, provided it is not in use, by the license ``$ID``:
 
 .. code-block:: bash
 
-  curl -X DELETE -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID
+  curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/$ID"
 
 Superusers can change the sorting order of a license specified by the license ``$ID``:
 
 .. code-block:: bash
 
   export SORT_ORDER=100
-  curl -X PUT -H 'Content-Type: application/json' -H X-Dataverse-key:$API_TOKEN $SERVER_URL/api/licenses/$ID/:sortOrder/$SORT_ORDER
+  curl -X PUT -H 'Content-Type: application/json' -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/licenses/$ID/:sortOrder/$SORT_ORDER"
   
 List Dataset Templates
 ~~~~~~~~~~~~~~~~~~~~~~
@@ -4384,13 +5381,13 @@ A curl example using an ``ID``
   export SERVER_URL=https://demo.dataverse.org
   export ID=24
 
-  curl -X DELETE $SERVER_URL/api/admin/template/$ID
+  curl -X DELETE "$SERVER_URL/api/admin/template/$ID"
 
 The fully expanded example above (without environment variables) looks like this:
 
 .. code-block:: bash
 
-  curl -X DELETE https://demo.dataverse.org/api/admin/template/24
+  curl -X DELETE "https://demo.dataverse.org/api/admin/template/24"
 
 .. _api-native-signed-url:
   
@@ -4417,7 +5414,99 @@ A curl example using allowing access to a dataset's metadata
   export API_KEY=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
   export JSON='{"url":"https://demo.dataverse.org/api/v1/datasets/:persistentId/?persistentId=doi:10.5072/FK2/J8SJZB","timeOut":5,"user":"alberteinstein"}'
 
-  curl -H "X-Dataverse-key:$API_KEY" -H 'Content-Type:application/json' -d "$JSON" $SERVER_URL/api/admin/requestSignedUrl
+  curl -H "X-Dataverse-key:$API_KEY" -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/admin/requestSignedUrl"
 
 Please see :ref:`dataverse.api.signature-secret` for the configuration option to add a shared secret, enabling extra
 security.
+
+.. _send-feedback:
+
+Send Feedback To Contact(s)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This API call allows sending an email to the contacts for a collection, dataset, or datafile or to the support email address when no object is specified.
+The call is protected by the normal /admin API protections (limited to localhost or requiring a separate key), but does not otherwise limit the sending of emails.
+Administrators should be sure only trusted applications have access to avoid the potential for spam.
+
+The call is a POST with a JSON object as input with four keys:
+
+- "targetId" - the id of the collection, dataset, or datafile. Persistent ids and collection aliases are not supported. (Optional)
+- "subject" - the email subject line
+- "body" - the email body to send
+- "fromEmail" - the email to list in the reply-to field. (Dataverse always sends mail from the system email, but does it "on behalf of" and with a reply-to for the specified user.)
+
+A curl example using an ``ID``
+
+.. code-block:: bash
+
+  export SERVER_URL=http://localhost
+  export JSON='{"targetId":24, "subject":"Data Question", "body":"Please help me understand your data. Thank you!", "fromEmail":"dataverseSupport@mailinator.com"}'
+
+  curl -X POST -H 'Content-Type:application/json' -d "$JSON" "$SERVER_URL/api/admin/feedback"
+
+Note that this call could be useful in coordinating with dataset authors (assuming they are also contacts) as an alternative/addition to the functionality provided by :ref:`return-a-dataset`.
+
+.. _thumbnail_reset:
+
+Reset Thumbnail Failure Flags
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If Dataverse attempts to create a thumbnail image for an image or PDF file and the attempt fails, Dataverse will set a flag for the file to avoid repeated attempts to generate the thumbnail.
+For cases where the problem may have been temporary (or fixed in a later Dataverse release), the API calls below can be used to reset this flag for all files or for a given file.
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+  export FILE_ID=1234
+
+  curl -X DELETE "$SERVER_URL/api/admin/clearThumbnailFailureFlag"
+
+  curl -X DELETE "$SERVER_URL/api/admin/clearThumbnailFailureFlag/$FILE_ID"
+
+.. _download-file-from-tmp:
+
+Download File from /tmp
+~~~~~~~~~~~~~~~~~~~~~~~
+
+As a superuser::
+
+    GET /api/admin/downloadTmpFile?fullyQualifiedPathToFile=/tmp/foo.txt
+
+Note that this API is probably only useful for testing.
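+
+As a curl sketch (assuming the admin API is reachable on localhost, as in the other admin examples above):
+
+.. code-block:: bash
+
+  curl "http://localhost:8080/api/admin/downloadTmpFile?fullyQualifiedPathToFile=/tmp/foo.txt"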
+
+MyData
+------
+
+The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit.
+
+A curl example listing objects
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export ROLE_IDS=6
+  export DVOBJECT_TYPES=Dataset
+  export PUBLISHED_STATES=Unpublished
+  export PER_PAGE=10
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/mydata/retrieve?role_ids=$ROLE_IDS&dvobject_types=$DVOBJECT_TYPES&published_states=$PUBLISHED_STATES&per_page=$PER_PAGE"
+
+Parameters:
+
+``role_ids`` Roles are customizable. Standard roles include:
+
+- ``1`` = Admin
+- ``2`` = File Downloader
+- ``3`` = Dataverse + Dataset Creator
+- ``4`` = Dataverse Creator
+- ``5`` = Dataset Creator
+- ``6`` = Contributor
+- ``7`` = Curator
+- ``8`` = Member
+
+``dvobject_types`` Type of object, several possible values among: ``DataFile``, ``Dataset`` & ``Dataverse``.
+
+``published_states`` State of the object, several possible values among: ``Published``, ``Unpublished``, ``Draft``, ``Deaccessioned`` & ``In+Review``.
+
+``per_page`` Number of results returned per page.
+
diff --git a/doc/sphinx-guides/source/api/sword.rst b/doc/sphinx-guides/source/api/sword.rst
index 11b43e98774..51391784bde 100755
--- a/doc/sphinx-guides/source/api/sword.rst
+++ b/doc/sphinx-guides/source/api/sword.rst
@@ -9,19 +9,19 @@ SWORD_ stands for "Simple Web-service Offering Repository Deposit" and is a "pro
 About
 -----
 
-Introduced in Dataverse Network (DVN) `3.6 <http://guides.dataverse.org/en/3.6.2/dataverse-api-main.html#data-deposit-api>`_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`.
+Introduced in Dataverse Network (DVN) `3.6 <https://guides.dataverse.org/en/3.6.2/dataverse-api-main.html#data-deposit-api>`_, the SWORD API was formerly known as the "Data Deposit API" and ``data-deposit/v1`` appeared in the URLs. For backwards compatibility these URLs continue to work (with deprecation warnings). Due to architectural changes and security improvements (especially the introduction of API tokens) in Dataverse Software 4.0, a few backward incompatible changes were necessarily introduced and for this reason the version has been increased to ``v1.1``. For details, see :ref:`incompatible`.
 
-The Dataverse Software implements most of SWORDv2_, which is specified at http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc.
+The Dataverse Software implements most of SWORDv2_, which is specified at https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html . Please reference the `SWORDv2 specification`_ for expected HTTP status codes (i.e. 201, 204, 404, etc.), headers (i.e. "Location"), etc.
 
 As a profile of AtomPub, XML is used throughout SWORD. As of Dataverse Software 4.0 datasets can also be created via JSON using the "native" API. SWORD is limited to the dozen or so fields listed below in the crosswalk, but the native API allows you to populate all metadata fields available in a Dataverse installation.
 
-.. _SWORD: http://en.wikipedia.org/wiki/SWORD_%28protocol%29
+.. _SWORD: https://en.wikipedia.org/wiki/SWORD_%28protocol%29
 
 .. _SWORDv2: http://swordapp.org/sword-v2/sword-v2-specifications/
 
 .. _RFC 5023: https://tools.ietf.org/html/rfc5023
 
-.. _SWORDv2 specification: http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html
+.. _SWORDv2 specification: https://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html
 
 .. _sword-auth:
 
@@ -86,7 +86,7 @@ New features as of v1.1
 
 - "Contact E-mail" is automatically populated from dataset owner's email.
 
-- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References <http://guides.dataverse.org/en/latest/user/appendix.html#metadata-references>`_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A".
+- "Subject" uses our controlled vocabulary list of subjects. This list is in the Citation Metadata of our User Guide > `Metadata References <https://guides.dataverse.org/en/latest/user/appendix.html#metadata-references>`_. Otherwise, if a term does not match our controlled vocabulary list, it will put any subject terms in "Keyword". If Subject is empty it is automatically populated with "N/A".
 
 - Zero-length files are now allowed (but not necessarily encouraged).
 
@@ -127,7 +127,7 @@ Dublin Core Terms (DC Terms) Qualified Mapping - Dataverse Project DB Element Cr
 +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+
 |dcterms:creator              |         authorName (LastName, FirstName)     |       Y      |  Author(s) for the Dataset.                                                                                                                                 |
 +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+
-|dcterms:subject              |   subject (Controlled Vocabulary) OR keyword |       Y      |  Controlled Vocabulary list is in our User Guide > `Metadata References <http://guides.dataverse.org/en/latest/user/appendix.html#metadata-references>`_.   |                                                                                                                
+|dcterms:subject              |   subject (Controlled Vocabulary) OR keyword |       Y      |  Controlled Vocabulary list is in our User Guide > `Metadata References <https://guides.dataverse.org/en/latest/user/appendix.html#metadata-references>`_.  |                                                                                                                
 +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+
 |dcterms:description          |              dsDescriptionValue              |       Y      |  Describing the purpose, scope or nature of the Dataset. Can also use dcterms:abstract.                                                                     |
 +-----------------------------+----------------------------------------------+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------+
diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py
index 736d86cacf5..64efc359e9a 100755
--- a/doc/sphinx-guides/source/conf.py
+++ b/doc/sphinx-guides/source/conf.py
@@ -66,9 +66,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '5.13'
+version = '6.1'
 # The full version, including alpha/beta/rc tags.
-release = '5.13'
+release = '6.1'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
@@ -432,7 +432,7 @@
 
 
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {'http://docs.python.org/': None}
+intersphinx_mapping = {'https://docs.python.org/': None}
 # Suppress "WARNING: unknown mimetype for ..." https://github.com/IQSS/dataverse/issues/3391
 suppress_warnings = ['epub.unknown_project_files']
 rst_prolog = """
diff --git a/doc/sphinx-guides/source/container/app-image.rst b/doc/sphinx-guides/source/container/app-image.rst
new file mode 100644
index 00000000000..29f6d6ac1d4
--- /dev/null
+++ b/doc/sphinx-guides/source/container/app-image.rst
@@ -0,0 +1,217 @@
+Dataverse Application Image
+===========================
+
+The application image is a layer on top of the base image and contains the Dataverse software.
+
+.. contents:: |toctitle|
+    :local:
+
+An "application image" offers you a deployment ready Dataverse application running on the underlying
+application server, which is provided by the :doc:`base-image`. Its sole purpose is to bundle the application
+and any additional material necessary to successfully jumpstart the application.
+
+Until all :ref:`jvm-options` are *MicroProfile Config* enabled, it also adds the necessary scripting glue to
+configure the application's domain while the application server boots. See :ref:`app-tunables`.
+
+Within the main repository, you may find the application image's files at ``<git root>/src/main/docker``.
+This is the same Maven module providing a Dataverse WAR file for classic installations, and uses the
+`Maven Docker Plugin <https://dmp.fabric8.io>`_ to build and ship the image within a special Maven profile.
+
+**NOTE: This image is created, maintained and supported by the Dataverse community on a best-effort basis.**
+IQSS will not offer support on how to deploy or run it; please reach out to the community for help on using it.
+You might be interested in taking a look at :doc:`../developers/containers`, linking you to some (community-based)
+efforts.
+
+
+
+Supported Image Tags
+++++++++++++++++++++
+
+This image is sourced from the main upstream code `repository of the Dataverse software <https://github.com/IQSS/dataverse>`_.
+Development and maintenance of the `image's code <https://github.com/IQSS/dataverse/tree/develop>`_ happens there
+(again, by the community).
+
+.. note::
+    Please note that this image is not (yet) available from Docker Hub. You need to build it locally to use it
+    (see below). Follow https://github.com/IQSS/dataverse/issues/9444 for new developments.
+
+
+
+Image Contents
+++++++++++++++
+
+The application image builds by convention upon the :doc:`base image <base-image>` and provides:
+
+- Dataverse class files
+- Resource files
+- Dependency JAR files
+- `JHove <http://jhove.openpreservation.org>`_ configuration
+- Script to configure the application server domain for :ref:`jvm-options` not yet *MicroProfile Config* enabled.
+
+The image is provided as a multi-arch image to support the most common architectures Dataverse usually runs on:
+AMD64 (Windows/Linux/...) and ARM64 (Apple M1/M2). (Easy to extend.)
+
+
+
+Build Instructions
+++++++++++++++++++
+
+Assuming you have `Docker <https://docs.docker.com/engine/install/>`_, `Docker Desktop <https://www.docker.com/products/docker-desktop/>`_,
+`Moby <https://mobyproject.org/>`_ or some remote Docker host configured, up and running from here on.
+
+Simply execute the Maven module's packaging target with the "container" profile activated from the project's Git root to
+compile the Java code and build the image:
+
+``mvn -Pct clean package``
+
+Some additional notes, using Maven parameters to change the build and use ...:
+
+- | ... a different tag only: add ``-Dapp.image.tag=tag``.
+  | *Note:* default is ``unstable``
+- | ... a different image name and tag: add ``-Dapp.image=name:tag``.
+  | *Note:* default is ``gdcc/dataverse:${app.image.tag}``
+- ... a different image registry than Docker Hub: add ``-Ddocker.registry=registry.example.org`` (see also
+  `DMP docs on registries <https://dmp.fabric8.io/#registry>`__)
+- | ... a different base image tag: add ``-Dbase.image.tag=tag``
+  | *Note:* default is ``unstable``
+- | ... a different base image: add ``-Dbase.image=name:tag``
+  | *Note:* default is ``gdcc/base:${base.image.tag}``. See also :doc:`base-image` for more details on it.
+
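+For example, building with a custom application image tag on top of a specific base image tag (the tag values
+shown are illustrative):
+
+.. code-block:: bash
+
+  mvn -Pct clean package -Dapp.image.tag=my-feature -Dbase.image.tag=unstable
+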
+Automated Builds & Publishing
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+See note above at "Supported Image Tags".
+
+.. _app-multiarch:
+
+Processor Architecture and Multiarch
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This image is created as a "multi-arch image", supporting the most common architectures Dataverse usually runs on:
+AMD64 (Windows/Linux/...) and ARM64 (Apple M1/M2), by using `Maven Docker Plugin's BuildX mode <https://dmp.fabric8.io/#build-buildx>`_.
+
+Building the image via ``mvn -Pct package`` or ``mvn -Pct install`` as above will only build for the architecture of
+the Docker machine's CPU.
+
+Only ``mvn -Pct clean deploy -Ddocker.platforms=linux/amd64,linux/arm64`` will trigger building on all enabled architectures.
+Yet, to enable building with non-native code on your build machine, you will need to set up a cross-platform builder.
+
+On Linux, you should install `qemu-user-static <https://github.com/multiarch/qemu-user-static>`__ (preferably via
+your package management) on the host and run ``docker run --rm --privileged multiarch/qemu-user-static --reset -p yes``
+to enable that builder. The Docker plugin will set up everything else for you.
+
+
+
+.. _app-tunables:
+
+Tunables
+++++++++
+
+The :doc:`base-image` provides a long list of possible options to tune many aspects of the application server, and,
+as the application image builds upon it, :ref:`Base Image Tunables <base-tunables>` apply to it as well.
+
+In addition, the application image provides the following tunables:
+
+.. list-table::
+    :align: left
+    :width: 100
+    :widths: 10 10 10 50
+    :header-rows: 1
+
+    * - Env. variable
+      - Default
+      - Type
+      - Description
+    * - ``MP_CONFIG_PROFILE``
+      - ``ct``
+      - String
+      - Set to switch the activated *MicroProfile Config Profile*. Note that certain defaults will not apply any longer.
+        See :ref:`:ApplicationServerSettings` for details.
+    * - ``dataverse_*`` and ``doi_*``
+      - \-
+      - String
+      - Configure any :ref:`jvm-options` not yet *MicroProfile Config* enabled with this magic trick.
+
+        1. Simply pick a JVM option from the list and replace any ``.`` with ``_``.
+        2. Replace any ``-`` in the option name with ``__``.
+    * - ``DATAVERSE_MAIL_HOST``
+      - ``smtp``
+      - String
+      - A hostname (without port) at which a mail MTA can be reached on port 25.
+    * - ``DATAVERSE_MAIL_USER``
+      - ``dataversenotify``
+      - String
+      - A username to use with the Mail MTA
+    * - ``DATAVERSE_MAIL_FROM``
+      - ``dataverse@localhost``
+      - Mail address
+      - The "From" field for all outbound mail. Make sure to set :ref:`systemEmail` to the same value or no mail will
+        be sent.
+
+
+Note that the script ``init_2_configure.sh`` will apply a few very important defaults to enable quick usage
+by a) activating the scheduled tasks timer, b) adding local file storage if not disabled, and c) setting a sensible
+password reset timeout:
+
+.. code-block:: shell
+
+    dataverse_auth_password__reset__timeout__in__minutes=60
+    dataverse_timerServer=true
+    dataverse_files_storage__driver__id=local
+
+    if dataverse_files_storage__driver__id = "local" then
+        dataverse_files_local_type=file
+        dataverse_files_local_label=Local
+        dataverse_files_local_directory=${STORAGE_DIR}/store
+
+
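+For example, these variables can be passed as environment variables when running the container (a sketch; the
+values are illustrative, and a full deployment additionally needs the services described in :doc:`dev-usage`):
+
+.. code-block:: bash
+
+  # documented tunables from the table above, passed as environment variables
+  docker run -it --rm \
+    -e DATAVERSE_MAIL_HOST=smtp.example.org \
+    -e DATAVERSE_MAIL_FROM=dataverse@example.org \
+    -e dataverse_files_storage__driver__id=local \
+    gdcc/dataverse:unstable
+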
+
+.. _app-locations:
+
+Locations
++++++++++
+
+There are only a few important additions to the list of :ref:`locations provided by the base image <base-locations>`.
+Please make sure to back these locations with volumes or tmpfs to avoid writing data into the overlay filesystem, which
+will significantly hurt performance.
+
+.. list-table::
+    :align: left
+    :width: 100
+    :widths: 10 10 50
+    :header-rows: 1
+
+    * - Location
+      - Value
+      - Description
+    * - ``${STORAGE_DIR}``
+      - ``/dv``
+      - Defined by base image. Either back this folder or, if suitable, the locations below it with volumes
+        or tmpfs.
+    * - ``${STORAGE_DIR}/uploads``
+      - ``/dv/uploads``
+      - See :ref:`dataverse.files.uploads` for a detailed description.
+    * - ``${STORAGE_DIR}/temp``
+      - ``/dv/temp``
+      - See :ref:`dataverse.files.directory` for a detailed description.
+    * - ``${STORAGE_DIR}/store``
+      - ``/dv/store``
+      - Important when using the default provided local storage option (see above and :ref:`storage-files-dir`)
+    * - ``/tmp``
+      - \-
+      - Location for temporary files, see also :ref:`temporary-file-storage`
+
+
+
+Exposed Ports
++++++++++++++
+
+See base image :ref:`exposed port <base-exposed-ports>`.
+
+
+
+Entry & Extension Points
+++++++++++++++++++++++++
+
+The application image makes use of the base image provided system to execute scripts on boot, see :ref:`base-entrypoint`.
+See there for potential extension of this image in your own derivative.
diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst
index 931c722f91b..1a47a8fc413 100644
--- a/doc/sphinx-guides/source/container/base-image.rst
+++ b/doc/sphinx-guides/source/container/base-image.rst
@@ -1,11 +1,13 @@
 Application Base Image
 ======================
 
+The base image contains Payara and other dependencies that the Dataverse software runs on. It is the foundation for the :doc:`app-image`. Note that some dependencies, such as PostgreSQL and Solr, run in their own containers and are not part of the base image.
+
 .. contents:: |toctitle|
     :local:
 
 A "base image" offers you a pre-installed and pre-tuned application server to deploy Dataverse software to.
-Adding basic functionality like executing scripts at container boot, monitoring, memory tweaks etc is all done
+Adding basic functionality like executing scripts at container boot, monitoring, memory tweaks etc. is all done
 at this layer, to make the application image focus on the app itself.
 
 **NOTE: The base image does not contain the Dataverse application itself.**
@@ -15,7 +17,7 @@ This Maven module uses the `Maven Docker Plugin <https://dmp.fabric8.io>`_ to bu
 You may use, extend, or alter this image to your liking and/or host in some different registry if you want to.
 
 **NOTE: This image is created, maintained and supported by the Dataverse community on a best-effort basis.**
-IQSS will not offer you support how to deploy or run it, please reach out to the community for help on using it.
+IQSS will not offer support on how to deploy or run it; please reach out to the community (:ref:`support`) for help on using it.
 You might be interested in taking a look at :doc:`../developers/containers`, linking you to some (community-based)
 efforts.
 
@@ -29,7 +31,7 @@ upstream branches:
 
 - The ``unstable`` tag corresponds to the ``develop`` branch, where pull requests are merged.
   (`Dockerfile <https://github.com/IQSS/dataverse/tree/develop/modules/container-base/src/main/docker/Dockerfile>`__)
-- The ``stable`` tag corresponds to the ``master`` branch, where releases are cut from.
+- The ``alpha`` tag corresponds to the ``master`` branch, where releases are cut from.
   (`Dockerfile <https://github.com/IQSS/dataverse/tree/master/modules/container-base/src/main/docker/Dockerfile>`__)
 
 
@@ -39,7 +41,7 @@ Image Contents
 
 The base image provides:
 
-- `Eclipse Temurin JRE using Java 11 <https://adoptium.net/temurin/releases?version=11>`_
+- `Eclipse Temurin JRE using Java 17 <https://adoptium.net/temurin/releases?version=17>`_
 - `Payara Community Application Server <https://docs.payara.fish/community>`_
 - CLI tools necessary to run Dataverse (i. e. ``curl`` or ``jq`` - see also :doc:`../installation/prerequisites` in Installation Guide)
 - Linux tools for analysis, monitoring and so on
@@ -61,7 +63,7 @@ Build Instructions
 Assuming you have `Docker <https://docs.docker.com/engine/install/>`_, `Docker Desktop <https://www.docker.com/products/docker-desktop/>`_,
 `Moby <https://mobyproject.org/>`_ or some remote Docker host configured, up and running from here on.
 
-Simply execute the Maven modules packaging target with activated "container profile. Either from the projects Git root:
+Simply execute the Maven module's packaging target with the "container" profile activated. Either from the project's Git root:
 
 ``mvn -Pct -f modules/container-base install``
 
@@ -72,7 +74,7 @@ Or move to the module and execute:
 Some additional notes, using Maven parameters to change the build and use ...:
 
 - | ... a different tag only: add ``-Dbase.image.tag=tag``.
-  | *Note:* default is ``develop``
+  | *Note:* default is ``unstable``
 - | ... a different image name and tag: add ``-Dbase.image=name:tag``.
   | *Note:* default is ``gdcc/base:${base.image.tag}``
 - ... a different image registry than Docker Hub: add ``-Ddocker.registry=registry.example.org`` (see also
@@ -101,19 +103,26 @@ Processor Architecture and Multiarch
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 This image is created as a "multi-arch image", supporting the most common architectures Dataverse usually runs on:
-AMD64 (Windows/Linux/...) and ARM64 (Apple M1/M2), by using Maven Docker Plugin's *BuildX* mode.
+AMD64 (Windows/Linux/...) and ARM64 (Apple M1/M2), by using `Maven Docker Plugin's BuildX mode <https://dmp.fabric8.io/#build-buildx>`_.
 
 Building the image via ``mvn -Pct package`` or ``mvn -Pct install`` as above will only build for the architecture of
-the Docker maschine's CPU.
+the Docker machine's CPU.
+
+Only ``mvn -Pct deploy`` will trigger building on all enabled architectures (and will try to push the images to a
+registry, which is Docker Hub by default).
 
-Only ``mvn -Pct deploy`` will trigger building on all enabled architectures.
-Yet, to enable building with non-native code on your build machine, you will need to setup a cross-platform builder.
+You can specify which architectures you would like to build for and include by passing them as a comma-separated list:
+``mvn -Pct deploy -Ddocker.platforms="linux/amd64,linux/arm64"``. The configuration shown is the default and may be omitted.
+
+Yet, to enable building with non-native code on your build machine, you will need to set up a cross-platform builder!
 
 On Linux, you should install `qemu-user-static <https://github.com/multiarch/qemu-user-static>`__ (preferably via
 your package management) on the host and run ``docker run --rm --privileged multiarch/qemu-user-static --reset -p yes``
 to enable that builder. The Docker plugin will setup everything else for you.
 
+The upstream CI workflows publish images supporting AMD64 and ARM64 (see e.g. the tag details on Docker Hub).
 
+.. _base-tunables:
 
 Tunables
 ++++++++
@@ -222,6 +231,7 @@ provides. These are mostly based on environment variables (very common with cont
 .. [dump-option] ``-XX:+HeapDumpOnOutOfMemoryError``
 
 
+.. _base-locations:
 
 Locations
 +++++++++
@@ -303,6 +313,8 @@ named Docker volume in these places to avoid data loss, gain performance and/or
         You should mount some storage here (disk or ephemeral).
 
 
+.. _base-exposed-ports:
+
 Exposed Ports
 +++++++++++++
 
diff --git a/doc/sphinx-guides/source/container/configbaker-image.rst b/doc/sphinx-guides/source/container/configbaker-image.rst
new file mode 100644
index 00000000000..d098bd46436
--- /dev/null
+++ b/doc/sphinx-guides/source/container/configbaker-image.rst
@@ -0,0 +1,231 @@
+Config Baker Image
+==================
+
+The config baker container may be used to execute all sorts of tasks around setting up, preparing and finalizing
+an instance of the Dataverse software. Its focus is bootstrapping non-initialized installations.
+
+.. contents:: |toctitle|
+    :local:
+
+Quickstart
+++++++++++
+
+To see the Config Baker help screen:
+
+``docker run -it --rm gdcc/configbaker:unstable``
+
+Supported Image Tags
+++++++++++++++++++++
+
+This image is sourced from the main upstream code `repository of the Dataverse software <https://github.com/IQSS/dataverse>`_.
+Development and maintenance of the `image's code <https://github.com/IQSS/dataverse/tree/develop/modules/container-configbaker>`_
+happens there (again, by the community). Community-supported image tags are based on the two most important
+upstream branches:
+
+- The ``unstable`` tag corresponds to the ``develop`` branch, where pull requests are merged.
+  (`Dockerfile <https://github.com/IQSS/dataverse/tree/develop/modules/container-configbaker/src/main/docker/Dockerfile>`__)
+- The ``alpha`` tag corresponds to the ``master`` branch, where releases are cut from.
+  (`Dockerfile <https://github.com/IQSS/dataverse/tree/master/modules/container-configbaker/src/main/docker/Dockerfile>`__)
+
+
+
+Image Contents
+++++++++++++++
+
+This image contains some crucial parts to make a freshly baked Dataverse installation usable.
+
+Scripts
+^^^^^^^
+
+.. list-table::
+  :align: left
+  :widths: 20 80
+  :header-rows: 1
+
+  * - Script
+    - Description
+  * - ``bootstrap.sh``
+    - Run an initialization script contained in a persona. See ``bootstrap.sh -h`` for usage details.
+      For development purposes, use ``bootstrap.sh dev`` or provide your own.
+  * - ``fix-fs-perms.sh``
+    - Fixes filesystem permissions. App and Solr container run as non-privileged users and might need adjusted
+      filesystem permissions on mounted volumes to be able to write data. Run without parameters to see usage details.
+  * - ``help.sh``
+    - Default script when running container without parameters. Lists available scripts and details about them.
+  * - ``update-fields.sh``
+    - Update a Solr ``schema.xml`` with a given list of metadata fields. See ``update-fields.sh -h`` for usage details
+      and :ref:`update-solr-schema` for an example use case.
+
+Solr Template
+^^^^^^^^^^^^^
+
+In addition, at ``/template`` a `Solr Configset <https://solr.apache.org/guide/solr/latest/configuration-guide/config-sets.html>`_
+is available, ready for Dataverse usage with a tuned core config and schema.
+
+Providing this template to a vanilla Solr image and using `solr-precreate <https://solr.apache.org/guide/solr/latest/deployment-guide/solr-in-docker.html#using-solr-precreate-command>`_
+with it will create the necessary Solr search index.
+
+The ``solrconfig.xml`` and ``schema.xml`` are included from the upstream project ``conf/solr/...`` folder. You are
+obviously free to provide such a template in some other way, maybe tuned for your purposes.
+As a start, the contained script ``update-fields.sh`` may be used to edit the field definitions.
+
+
+
+Build Instructions
+++++++++++++++++++
+
+Assuming you have `Docker <https://docs.docker.com/engine/install/>`_, `Docker Desktop <https://www.docker.com/products/docker-desktop/>`_,
+`Moby <https://mobyproject.org/>`_ or some remote Docker host configured, up and running from here on.
+Note: You need to use Maven when building this image, as we collate selected files from different places in the upstream
+repository. (Building with pure Docker Compose does not support this kind of selection.)
+
+By default, when building the application image, it will also create a new config baker image. Simply execute the
+Maven module's packaging target with the "container" profile activated from the project's Git root to build the image:
+
+``mvn -Pct package``
+
+If you specifically want to build a config baker image *only*, try
+
+``mvn -Pct docker:build -Ddocker.filter=dev_bootstrap``
+
+The build of config baker involves copying Solr configset files. The Solr version used is inherited from Maven,
+acting as the single source of truth. Also, the tag of the image should correspond to that of the application image, as
+their usage is intertwined.
+
+Some additional notes, using Maven parameters to change the build and use ...:
+
+- | ... a different tag only: add ``-Dconf.image.tag=tag``.
+  | *Note:* default is ``${app.image.tag}``, which defaults to ``unstable``
+- | ... a different image name and tag: add ``-Dconf.image=name:tag``.
+  | *Note:* default is ``gdcc/configbaker:${conf.image.tag}``
+- ... a different image registry than Docker Hub: add ``-Ddocker.registry=registry.example.org`` (see also
+  `DMP docs on registries <https://dmp.fabric8.io/#registry>`__)
+- ... a different Solr version: use ``-Dsolr.version=x.y.z``
+
+Processor Architecture and Multiarch
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This image is published as a "multi-arch image", supporting the most common architectures Dataverse usually runs on:
+AMD64 (Windows/Linux/...) and ARM64 (Apple M1/M2), by using `Maven Docker Plugin's BuildX mode <https://dmp.fabric8.io/#build-buildx>`_.
+
+Building the image via ``mvn -Pct package``, etc. will only build for the architecture of the Docker machine's CPU.
+
+Only ``mvn -Pct deploy -Ddocker.platforms=linux/amd64,linux/arm64`` will trigger building on all enabled architectures.
+Yet, to enable building with non-native code on your build machine, you will need to set up a cross-platform builder.
+
+On Linux, you should install `qemu-user-static <https://github.com/multiarch/qemu-user-static>`__ (preferably via
+your package management) on the host and run ``docker run --rm --privileged multiarch/qemu-user-static --reset -p yes``
+to enable that builder. The Docker plugin will set up everything else for you.
+
+
+
+Tunables
+++++++++
+
+This image has no tunable runtime parameters yet.
+
+
+
+Locations
++++++++++
+
+.. list-table::
+    :align: left
+    :width: 100
+    :widths: 10 10 50
+    :header-rows: 1
+
+    * - Location
+      - Value
+      - Description
+    * - ``${SCRIPT_DIR}``
+      - ``/scripts``
+      - Place to store the scripts. Part of ``$PATH``.
+    * - ``${SOLR_TEMPLATE}``
+      - ``/template``
+      - Place where the Solr Configset resides to create an index core from it.
+    * - ``${BOOTSTRAP_DIR}``
+      - ``/scripts/bootstrap``
+      - Stores the bootstrapping personas in sub-folders.
+    * - ``${BOOTSTRAP_DIR}/base``
+      - ``/scripts/bootstrap/base``
+      - Minimal set of scripts and data from upstream ``scripts/api`` folder, just enough for the most basic setup.
+        The idea is that other personas may reuse it within their own ``init.sh``, avoiding (some) code duplication.
+        See ``dev`` persona for an example.
+
+
+
+Exposed Ports
++++++++++++++
+
+This image contains no runnable services yet, so no ports exposed.
+
+
+
+Entry & Extension Points
+++++++++++++++++++++++++
+
+The entrypoint of this image is pinned to ``dumb-init`` to safeguard signal handling. You may feed any script or
+executable to it as command.
+
+By using our released images as base image to add your own scripting, personas, Solr configset and so on, simply
+adapt and alter any aspect you need changed.
+
+
+
+Examples
+++++++++
+
+Docker Compose snippet to wait for Dataverse deployment and execute bootstrapping using a custom persona you added
+by bind mounting (as an alternative to extending the image):
+
+.. code-block:: yaml
+
+  bootstrap:
+    image: gdcc/configbaker:unstable
+    restart: "no"
+    command:
+      - bootstrap.sh
+      - mypersona
+    volumes:
+      - ./mypersona:/scripts/bootstrap/mypersona
+    networks:
+      - dataverse
+
+Docker Compose snippet to prepare execution of Solr and copy your custom configset you added by bind mounting
+(instead of an extension). Note that ``solr-precreate`` will not overwrite an already existing core! To update
+the config of an existing core, you need to mount the right volume with the stateful data!
+
+.. code-block:: yaml
+
+  solr_initializer:
+    container_name: solr_initializer
+    image: gdcc/configbaker:unstable
+    restart: "no"
+    command:
+      - sh
+      - -c
+      - "fix-fs-perms.sh solr && cp -a /template/* /solr-template"
+    volumes:
+      - ./volumes/solr/data:/var/solr
+      - ./volumes/solr/conf:/solr-template
+      - /tmp/my-generated-configset:/template
+
+  solr:
+    container_name: solr
+    hostname: solr
+    image: solr:${SOLR_VERSION}
+    depends_on:
+      - solr_initializer
+    restart: on-failure
+    ports:
+      - "8983:8983"
+    networks:
+      - dataverse
+    command:
+      - "solr-precreate"
+      - "collection1"
+      - "/template"
+    volumes:
+      - ./volumes/solr/data:/var/solr
+      - ./volumes/solr/conf:/template
diff --git a/doc/sphinx-guides/source/container/dev-usage.rst b/doc/sphinx-guides/source/container/dev-usage.rst
new file mode 100644
index 00000000000..b2547306b03
--- /dev/null
+++ b/doc/sphinx-guides/source/container/dev-usage.rst
@@ -0,0 +1,200 @@
+Development Usage
+=================
+
+Please note! This Docker setup is not for production!
+
+.. contents:: |toctitle|
+        :local:
+
+Quickstart
+----------
+
+See :ref:`container-dev-quickstart`.
+
+Intro
+-----
+
+Assuming you have `Docker <https://docs.docker.com/engine/install/>`_, `Docker Desktop <https://www.docker.com/products/docker-desktop/>`_,
+`Moby <https://mobyproject.org/>`_ or some remote Docker host configured, up and running from here on. Also assuming
+you have Java and Maven installed, as you are at least about to develop code changes.
+
+To test drive these local changes to the Dataverse codebase in a containerized application server (and avoid the
+setup described in :doc:`../developers/dev-environment`), you must a) build the application and b) run it in addition
+to the necessary dependencies. (Which might involve building a new local version of the :doc:`configbaker-image`.)
+
+.. _dev-build:
+
+Building
+--------
+
+To build the :doc:`application <app-image>` and :doc:`config baker image <configbaker-image>`, run the following command:
+
+``mvn -Pct clean package``
+
+Once this is done, you will see images ``gdcc/dataverse:unstable`` and ``gdcc/configbaker:unstable`` available in your
+Docker cache.
+
+**Note:** This will skip any unit tests. If you have built the code before for testing, etc. you might omit the
+``clean`` to avoid recompiling.
+
+**Note:** Although we have a ``docker-compose-dev.yml`` file, it is currently not possible to build the images without
+invoking Maven. This might change in the future.
+
+
+.. _dev-run:
+
+Running
+-------
+
+After building the app and config baker image containing your local changes to the Dataverse application, you want to
+run it together with all dependencies. There are four ways to do this (commands executed at root of project directory):
+
+.. list-table:: Cheatsheet: Running Containers
+   :widths: 15 40 45
+   :header-rows: 1
+   :stub-columns: 1
+   :align: left
+
+   * - \
+     - Using Maven
+     - Using Compose
+   * - In foreground
+     - ``mvn -Pct docker:run``
+     - ``docker compose -f docker-compose-dev.yml up``
+   * - In background
+     - ``mvn -Pct docker:start``
+     - ``docker compose -f docker-compose-dev.yml up -d``
+
+Both ways have their pros and cons:
+
+.. list-table:: Decision Helper: Fore- or Background?
+   :widths: 15 40 45
+   :header-rows: 1
+   :stub-columns: 1
+   :align: left
+
+   * - \
+     - Pros
+     - Cons
+   * - Foreground
+     - | Logs scroll by when interacting with API / UI
+       | To stop all containers simply hit ``Ctrl+C``
+     - | Lots and lots of logs scrolling by
+       | Must stop all containers to restart
+   * - Background
+     - | No logs scrolling by
+       | Easy to replace single containers
+     - | No logs scrolling by
+       | Stopping containers needs an extra command
+
+In case you want to concatenate building and running, here's a cheatsheet for you:
+
+.. list-table:: Cheatsheet: Building and Running Containers
+   :widths: 15 40 45
+   :header-rows: 1
+   :stub-columns: 1
+   :align: left
+
+   * - \
+     - Using Maven
+     - Using Compose
+   * - In foreground
+     - ``mvn -Pct package docker:run``
+     - ``mvn -Pct package && docker compose -f docker-compose-dev.yml up``
+   * - In background
+     - ``mvn -Pct package docker:start``
+     - ``mvn -Pct package && docker compose -f docker-compose-dev.yml up -d``
+
+Once all containers have been started, you can check whether the application was deployed correctly by checking the version
+at http://localhost:8080/api/info/version or by watching the logs.
+
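+For example (the response shown is illustrative):
+
+.. code-block:: bash
+
+  curl http://localhost:8080/api/info/version
+  # e.g. {"status":"OK","data":{"version":"6.1","build":"..."}}
+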
+**Note:** To stop all containers you started in background, invoke ``mvn -Pct docker:stop`` or
+``docker compose -f docker-compose-dev.yml down``.
+
+Check that you can log in to http://localhost:8080 using user ``dataverseAdmin`` and password ``admin1``.
+
+You can also access the Payara Admin Console if needed, which is available at http://localhost:4848. To log in, use
+user ``admin`` and password ``admin``. As a reminder, the application container is for development use only, so we
+are exposing the admin console for testing purposes. In a production environment, you would not want to expose
+this console.
+
+Note that data is persisted in ``./docker-dev-volumes`` in the root of the Git repo. For a clean start, you should
+remove this directory before running the ``mvn`` commands above.
+
+
+.. _dev-logs:
+
+Viewing Logs
+------------
+
+In case you started containers in background mode (see :ref:`dev-run`), you can use the following commands to view and/or
+watch logs from the containers.
+
+The safe bet for any running container's logs is to look up the container name via ``docker ps`` and use it in
+``docker logs <name>``. You can tail logs by adding ``-n`` and follow them by adding ``-f`` (just like the ``tail`` command).
+See ``docker logs --help`` for more.
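+
+For example (the container name shown is hypothetical; take the real one from ``docker ps``):
+
+.. code-block:: bash
+
+  # list running containers and their names
+  docker ps --format '{{.Names}}'
+
+  # show the last 100 lines of a container's log and keep following it
+  docker logs -f -n 100 dataverse-1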
+
+Alternatives:
+
+- In case you used Maven for running, you may use ``mvn -Pct docker:logs -Ddocker.filter=<service name>``.
+- If you used Docker Compose for running, you may use ``docker compose -f docker-compose-dev.yml logs <service name>``.
+  Options are the same.
+
+
+Redeploying
+-----------
+
+Rebuild and Running Images
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The safest way to redeploy code is to stop the running containers (with Ctrl-c if you started them in the foreground) and then build and run them again with ``mvn -Pct clean package docker:run``.
+
+IntelliJ IDEA Ultimate and Payara Platform Tools
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you have IntelliJ IDEA Ultimate (note that `free educational licenses <https://www.jetbrains.com/community/education/>`_ are available), you can install `Payara Platform Tools <https://plugins.jetbrains.com/plugin/15114-payara-platform-tools>`_ which can dramatically improve your feedback loop when iterating on code.
+
+The following steps are suggested:
+
+- Go to the Payara admin console (either at https://localhost:4848 or http://localhost:4849) and undeploy the dataverse application under "Applications".
+- Install Payara Platform Tools.
+- Under "Server":
+
+  - Click "Run" then "Edit Configurations".
+  - Click the plus sign and scroll down to Payara Server and click "Remote".
+  - For "Name" put "Payara in Docker" or something reasonable.
+  - Under "Application server" select a local directory that has the same version of Payara used in the container. This should match the version of Payara mentioned in the Installation Guide under :ref:`payara`.
+  - Change "Admin Server Port" to 4849.
+  - For username, put "admin".
+  - For password, put "admin".
+
+- Under "Deployment":
+
+  - Click the plus button and click "Artifact" then "dataverse:war".
+
+- Under "Startup/Connection":
+
+  - Click "Debug" and change the port to 9009.
+
+- Click "Run" and then "Debug Payara in Docker". This initial deployment will take some time.
+- Go to http://localhost:8080/api/info/version and make sure the API is responding.
+- Edit ``Info.java`` and make a small change to the ``/api/info/version`` code.
+- Click "Run" then "Debugging Actions" then "Reload Changed Classes". The deployment should only take a few seconds.
+- Go to http://localhost:8080/api/info/version and verify the change you made.
+
+Using a Debugger
+----------------
+
+The :doc:`base-image` enables usage of the `Java Debugging Wire Protocol <https://dzone.com/articles/remote-debugging-java-applications-with-jdwp>`_
+for remote debugging if you set ``ENABLE_JDWP=1`` as environment variable for the application container.
+The default configuration when executing containers with the commands listed at :ref:`dev-run` already enables this.
+
+There are a lot of tutorials on how to connect your IDE's debugger to a remote endpoint. Please use ``localhost:9009``
+as the endpoint. Here are links to the remote debugging docs of the most common IDEs:
+`Eclipse <https://help.eclipse.org/latest/topic/org.eclipse.jdt.doc.user/concepts/cremdbug.htm?cp=1_2_12>`_,
+`IntelliJ <https://www.jetbrains.com/help/idea/tutorial-remote-debug.html#debugger_rc>`_
+
+Building Your Own Base Image
+----------------------------
+
+If you find yourself tasked with upgrading Payara, you will need to create your own base image before running the :ref:`container-dev-quickstart`. For instructions, see :doc:`base-image`.
diff --git a/doc/sphinx-guides/source/container/index.rst b/doc/sphinx-guides/source/container/index.rst
index 92ac94e2cf2..4bbc87a4845 100644
--- a/doc/sphinx-guides/source/container/index.rst
+++ b/doc/sphinx-guides/source/container/index.rst
@@ -23,5 +23,7 @@ develop and extend them further are provided.
 
 .. toctree::
 
+  dev-usage
   base-image
-
+  app-image
+  configbaker-image
diff --git a/doc/sphinx-guides/source/developers/api-design.rst b/doc/sphinx-guides/source/developers/api-design.rst
new file mode 100755
index 00000000000..e7a7a6408bb
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/api-design.rst
@@ -0,0 +1,63 @@
+==========
+API Design
+==========
+
+API design is a large topic. We expect this page to grow over time.
+
+.. contents:: |toctitle|
+	:local:
+
+Paths
+-----
+
+A reminder `from Wikipedia <https://en.wikipedia.org/wiki/Uniform_Resource_Identifier>`_ of what a path is:
+
+.. code-block:: bash
+
+          userinfo       host      port
+          ┌──┴───┐ ┌──────┴──────┐ ┌┴┐
+  https://john.doe@www.example.com:123/forum/questions/?tag=networking&order=newest#top
+  └─┬─┘   └─────────────┬────────────┘└───────┬───────┘ └────────────┬────────────┘ └┬┘
+  scheme          authority                  path                  query           fragment
+
+Exposing Settings
+~~~~~~~~~~~~~~~~~
+
+Since Dataverse 4, database settings have been exposed via API at http://localhost:8080/api/admin/settings
+
+(JVM options are probably available via the Payara REST API, but this is out of scope.)
+
+Settings sometimes need to be exposed to API clients outside of ``/api/admin`` (which is typically restricted to localhost). Here are some guidelines to follow when exposing settings.
+
+- When you are exposing a database setting as-is:
+
+  - Use ``/api/info/settings`` as the root path.
+
+  - Append the name of the setting including the colon (e.g. ``:DatasetPublishPopupCustomText``)
+
+  - Final path example: ``/api/info/settings/:DatasetPublishPopupCustomText``
+
+- If the absence of the database setting is filled in by a default value (e.g. ``:ZipDownloadLimit`` or ``:ApiTermsOfUse``):
+
+  - Use ``/api/info`` as the root path.
+
+  - Append the setting but remove the colon and downcase the first character (e.g. ``zipDownloadLimit``)
+
+  - Final path example: ``/api/info/zipDownloadLimit``
+
+- If the database setting you're exposing makes more sense outside of ``/api/info`` because there's more context (e.g. ``:CustomDatasetSummaryFields``):
+
+  - Feel free to use a path outside of ``/api/info`` as the root path.
+
+  - Given additional context, append a shortened name (e.g. ``/api/datasets/summaryFieldNames``).
+
+  - Final path example: ``/api/datasets/summaryFieldNames``
+
+- If you need to expose a JVM option (MicroProfile setting) such as ``dataverse.api.allow-incomplete-metadata``:
+
+  - Use ``/api/info`` as the root path.
+
+  - Append a meaningful name for the setting (e.g. ``incompleteMetadataViaApi``).
+
+  - Final path example: ``/api/info/incompleteMetadataViaApi``
+
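+For illustration, a client call against two of the example paths above might look like this (a sketch; it assumes
+the endpoints are implemented and the installation is reachable at ``$SERVER_URL``):
+
+.. code-block:: bash
+
+  export SERVER_URL=https://demo.dataverse.org
+
+  # a database setting exposed as-is
+  curl "$SERVER_URL/api/info/settings/:DatasetPublishPopupCustomText"
+
+  # a setting with a default value, colon removed and first character downcased
+  curl "$SERVER_URL/api/info/zipDownloadLimit"
+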
diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst
index 0a3dd23ed23..8d891e63317 100644
--- a/doc/sphinx-guides/source/developers/big-data-support.rst
+++ b/doc/sphinx-guides/source/developers/big-data-support.rst
@@ -36,6 +36,18 @@ At present, one potential drawback for direct-upload is that files are only part
 
 ``./asadmin create-jvm-options "-Ddataverse.files.<id>.ingestsizelimit=<size in bytes>"``
 
+.. _s3-direct-upload-features-disabled:
+
+Features that are Disabled if S3 Direct Upload is Enabled
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following features are disabled when S3 direct upload is enabled.
+
+- Unzipping of zip files. (See :ref:`compressed-files`.)
+- Extraction of metadata from FITS files. (See :ref:`fits`.)
+- Creation of NcML auxiliary files (See :ref:`netcdf-and-hdf5`.)
+- Extraction of a geospatial bounding box from NetCDF and HDF5 files (see :ref:`netcdf-and-hdf5`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` is set to true.
+
 .. _cors-s3-bucket:
 
 Allow CORS for S3 Buckets
@@ -62,7 +74,7 @@ with the contents of the file cors.json as follows:
                 "AllowedOrigins": ["*"],
                 "AllowedHeaders": ["*"],
                 "AllowedMethods": ["PUT", "GET"],
-                "ExposeHeaders": ["ETag"]
+                "ExposeHeaders": ["ETag", "Accept-Ranges", "Content-Encoding", "Content-Range"]
              }
           ]
         }
@@ -137,30 +149,45 @@ Globus File Transfer
 
 Note: Globus file transfer is still experimental but feedback is welcome! See :ref:`support`.
 
-Users can transfer files via `Globus <ttps://www.globus.org>`_ into and out of datasets when their Dataverse installation is configured to use a Globus accessible S3 store and a community-developed `dataverse-globus <https://github.com/scholarsportal/dataverse-globus>`_ "transfer" app has been properly installed and configured.
-
-Due to differences in the access control models of a Dataverse installation and Globus, enabling the Globus capability on a store will disable the ability to restrict and embargo files in that store.
+Users can transfer files via `Globus <https://www.globus.org>`_ into and out of datasets, or reference files on a remote Globus endpoint, when their Dataverse installation is configured to use a Globus accessible store(s) 
+and a community-developed `dataverse-globus <https://github.com/scholarsportal/dataverse-globus>`_ app has been properly installed and configured.
 
-As Globus aficionados know, Globus endpoints can be in a variety of places, from data centers to personal computers. This means that from within the Dataverse software, a Globus transfer can feel like an upload or a download (with Globus Personal Connect running on your laptop, for example) or it can feel like a true transfer from one server to another (from a cluster in a data center into a Dataverse dataset or vice versa).
+Globus endpoints can be in a variety of places, from data centers to personal computers. 
+This means that from within the Dataverse software, a Globus transfer can feel like an upload or a download (with Globus Personal Connect running on your laptop, for example) or it can feel like a true transfer from one server to another (from a cluster in a data center into a Dataverse dataset or vice versa).
 
-Globus transfer uses a very efficient transfer mechanism and has additional features that make it suitable for large files and large numbers of files:
+Globus transfer uses an efficient transfer mechanism and has additional features that make it suitable for large files and large numbers of files:
 
 * robust file transfer capable of restarting after network or endpoint failures
 * third-party transfer, which enables a user accessing a Dataverse installation in their desktop browser to initiate transfer of their files from a remote endpoint (i.e. on a local high-performance computing cluster), directly to an S3 store managed by the Dataverse installation
 
-Globus transfer requires use of the Globus S3 connector which requires a paid Globus subscription at the host institution. Users will need a Globus account which could be obtained via their institution or directly from Globus (at no cost).
+Note: Due to differences between the access control models of a Dataverse installation and Globus, and to the current Globus store model, Dataverse cannot enforce per-file access restrictions.
+It is therefore recommended that, when Globus access is allowed, a store be configured as public, which disables the ability to restrict and embargo files in that store.
+
+Dataverse supports three options for using Globus, two involving transfer to Dataverse-managed endpoints and one allowing Dataverse to reference files on remote endpoints.
+Dataverse-managed endpoints must be Globus 'guest collections' hosted on either a file-system-based endpoint or an S3-based endpoint (the latter requires use of the Globus
+S3 connector, which in turn requires a paid Globus subscription at the host institution). In either case, Dataverse is configured with the Globus credentials of a user account that can manage the endpoint.
+Users will need a Globus account, which can be obtained via their institution or directly from Globus (at no cost).
 
-The setup required to enable Globus is described in the `Community Dataverse-Globus Setup and Configuration document <https://docs.google.com/document/d/1mwY3IVv8_wTspQC0d4ddFrD2deqwr-V5iAGHgOy4Ch8/edit?usp=sharing>`_ and the references therein.
+With the file-system endpoint, Dataverse does not currently have access to the file contents. Thus, functionality related to ingest, previews, fixity hash validation, etc. is not available. (Using the S3-based endpoint, Dataverse has access via S3 and all functionality normally associated with direct uploads to S3 is available.)
+
+For the reference use case, Dataverse must be configured with a list of allowed endpoint/base paths from which files may be referenced. In this case, since Dataverse is not accessing the remote endpoint itself, it does not need Globus credentials. 
+Users will need a Globus account in this case, and the remote endpoint must be configured to allow them access (i.e. be publicly readable, or potentially involve some out-of-band mechanism for requesting access that could be described in the dataset's Terms of Use and Access).
+
+All of Dataverse's Globus capabilities are now store-based (see the store documentation), so different collections/datasets can be configured to use different Globus-capable stores (or normal file stores, S3 stores, etc.).
+
+More details of the setup required to enable Globus are described in the `Community Dataverse-Globus Setup and Configuration document <https://docs.google.com/document/d/1mwY3IVv8_wTspQC0d4ddFrD2deqwr-V5iAGHgOy4Ch8/edit?usp=sharing>`_ and the references therein.
 
 As described in that document, Globus transfers can be initiated by choosing the Globus option in the dataset upload panel. (Globus, which does asynchronous transfers, is not available during dataset creation.) Analogously, "Globus Transfer" is one of the download options in the "Access Dataset" menu and optionally the file landing page download menu (if/when supported in the dataverse-globus app).
 
 An overview of the control and data transfer interactions between components was presented at the 2022 Dataverse Community Meeting and can be viewed in the `Integrations and Tools Session Video <https://youtu.be/3ek7F_Dxcjk?t=5289>`_ around the 1 hr 28 min mark.
 
-See also :ref:`Globus settings <:GlobusBasicToken>`.
+See also :ref:`Globus settings <:GlobusSettings>`.
 
 Data Capture Module (DCM)
 -------------------------
 
+Please note: The DCM feature is deprecated.
+
 Data Capture Module (DCM) is an experimental component that allows users to upload large datasets via rsync over ssh.
 
 DCM was developed and tested using Glassfish but these docs have been updated with references to Payara.
@@ -197,7 +224,7 @@ The JSON that a DCM sends to your Dataverse installation on successful checksum
    :language: json
 
 - ``status`` - The valid strings to send are ``validation passed`` and ``validation failed``.
-- ``uploadFolder`` - This is the directory on disk where your Dataverse installation should attempt to find the files that a DCM has moved into place. There should always be a ``files.sha`` file and a least one data file. ``files.sha`` is a manifest of all the data files and their checksums. The ``uploadFolder`` directory is inside the directory where data is stored for the dataset and may have the same name as the "identifier" of the persistent id (DOI or Handle). For example, you would send ``"uploadFolder": "DNXV2H"`` in the JSON file when the absolute path to this directory is ``/usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/DNXV2H/DNXV2H``.
+- ``uploadFolder`` - This is the directory on disk where your Dataverse installation should attempt to find the files that a DCM has moved into place. There should always be a ``files.sha`` file and at least one data file. ``files.sha`` is a manifest of all the data files and their checksums. The ``uploadFolder`` directory is inside the directory where data is stored for the dataset and may have the same name as the "identifier" of the persistent id (DOI or Handle). For example, you would send ``"uploadFolder": "DNXV2H"`` in the JSON file when the absolute path to this directory is ``/usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/DNXV2H/DNXV2H``.
 - ``totalSize`` - Your Dataverse installation will use this value to represent the total size in bytes of all the files in the "package" that's created. If 360 data files and one ``files.sha`` manifest file are in the ``uploadFolder``, this value is the sum of the 360 data files.
 
 
@@ -219,9 +246,9 @@ Add Dataverse Installation settings to use mock (same as using DCM, noted above)
 
 At this point you should be able to download a placeholder rsync script. Your Dataverse installation is then waiting for news from the DCM about if checksum validation has succeeded or not. First, you have to put files in place, which is usually the job of the DCM. You should substitute "X1METO" for the "identifier" of the dataset you create. You must also use the proper path for where you store files in your dev environment.
 
-- ``mkdir /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO``
-- ``mkdir /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
-- ``cd /usr/local/payara5/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
+- ``mkdir /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO``
+- ``mkdir /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
+- ``cd /usr/local/payara6/glassfish/domains/domain1/files/10.5072/FK2/X1METO/X1METO``
 - ``echo "hello" > file1.txt``
 - ``shasum file1.txt > files.sha``
 
@@ -236,104 +263,11 @@ The following low level command should only be used when troubleshooting the "im
 
 ``curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$DV_BASE_URL/api/batch/jobs/import/datasets/files/$DATASET_DB_ID?uploadFolder=$UPLOAD_FOLDER&totalSize=$TOTAL_SIZE"``
 
-Steps to set up a DCM via Docker for Development
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you need a fully operating DCM client for development purposes, these steps will guide you to setting one up. This includes steps to set up the DCM on S3 variant.
-
-Docker Image Set-up
-^^^^^^^^^^^^^^^^^^^
-
-See https://github.com/IQSS/dataverse/blob/develop/conf/docker-dcm/readme.md
-
-- Install docker if you do not have it
-      
-Optional steps for setting up the S3 Docker DCM Variant
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-- Before: the default bucket for DCM to hold files in S3 is named test-dcm. It is coded into `post_upload_s3.bash` (line 30). Change to a different bucket if needed.
-- Also Note: With the new support for multiple file store in the Dataverse Software, DCM requires a store with id="s3" and DCM will only work with this store.
-
-  - Add AWS bucket info to dcmsrv
-    - Add AWS credentials to ``~/.aws/credentials``
-
-      - ``[default]``
-      - ``aws_access_key_id =``
-      - ``aws_secret_access_key =``
-
-- Dataverse installation configuration (on dvsrv):
-
-  - Set S3 as the storage driver
-
-    - ``cd /opt/payara5/bin/``
-    - ``./asadmin delete-jvm-options "\-Ddataverse.files.storage-driver-id=file"``
-    - ``./asadmin create-jvm-options "\-Ddataverse.files.storage-driver-id=s3"``
-    - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.type=s3"``
-    - ``./asadmin create-jvm-options "\-Ddataverse.files.s3.label=s3"``
-    
-
-  - Add AWS bucket info to your Dataverse installation
-    - Add AWS credentials to ``~/.aws/credentials``
-    
-      - ``[default]``
-      - ``aws_access_key_id =``
-      - ``aws_secret_access_key =``
-
-    - Also: set region in ``~/.aws/config`` to create a region file. Add these contents:
-
-      - ``[default]``
-      - ``region = us-east-1``
-
-  - Add the S3 bucket names to your Dataverse installation
-
-    - S3 bucket for your Dataverse installation
-
-      - ``/usr/local/payara5/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.s3.bucket-name=iqsstestdcmbucket"``
-
-    - S3 bucket for DCM (as your Dataverse installation needs to do the copy over)
-
-      - ``/usr/local/payara5/glassfish/bin/asadmin create-jvm-options "-Ddataverse.files.dcm-s3-bucket-name=test-dcm"``
-
-  - Set download method to be HTTP, as DCM downloads through S3 are over this protocol ``curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http"``
-
-Using the DCM Docker Containers
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-For using these commands, you will need to connect to the shell prompt inside various containers (e.g. ``docker exec -it dvsrv /bin/bash``)
-
-- Create a dataset and download rsync upload script
-
-  - connect to client container: ``docker exec -it dcm_client bash``
-  - create dataset: ``cd /mnt ; ./create.bash`` ; this will echo the database ID to stdout
-  - download transfer script: ``./get_transfer.bash $database_id_from_create_script``
-  - execute the transfer script: ``bash ./upload-${database_id_from-create_script}.bash`` , and follow instructions from script.
-
-- Run script
-
-  - e.g. ``bash ./upload-3.bash`` (``3`` being the database id from earlier commands in this example).
-
-- Manually run post upload script on dcmsrv
-
-  - for posix implementation: ``docker exec -it dcmsrv /opt/dcm/scn/post_upload.bash``
-  - for S3 implementation: ``docker exec -it dcmsrv /opt/dcm/scn/post_upload_s3.bash``
-
-Additional DCM docker development tips
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-- You can completely blow away all the docker images with these commands (including non DCM ones!)
-  - ``docker-compose -f docmer-compose.yml down -v``
-
-- There are a few logs to tail
-
-  - dvsrv : ``tail -n 2000 -f /opt/payara5/glassfish/domains/domain1/logs/server.log``
-  - dcmsrv : ``tail -n 2000 -f /var/log/lighttpd/breakage.log``
-  - dcmsrv : ``tail -n 2000 -f /var/log/lighttpd/access.log``
-
-- You may have to restart the app server domain occasionally to deal with memory filling up. If deployment is getting reallllllly slow, its a good time.
-
 Repository Storage Abstraction Layer (RSAL)
 -------------------------------------------
 
+Please note: The RSAL feature is deprecated.
+
 Steps to set up a DCM via Docker for Development
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/doc/sphinx-guides/source/developers/classic-dev-env.rst b/doc/sphinx-guides/source/developers/classic-dev-env.rst
new file mode 100755
index 00000000000..d7b7f281634
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/classic-dev-env.rst
@@ -0,0 +1,266 @@
+=======================
+Classic Dev Environment
+=======================
+
+These are the old instructions we used for Dataverse 4 and 5. They should still work but these days we favor running Dataverse in Docker as described in :doc:`dev-environment`.
+
+These instructions are purposefully opinionated and terse to help you get your development environment up and running as quickly as possible! Please note that familiarity with running commands from the terminal is assumed.
+
+.. contents:: |toctitle|
+	:local:
+
+Quick Start (Docker)
+--------------------
+
+The quickest way to get Dataverse running is in Docker, as explained in the :doc:`../container/dev-usage` section of the Container Guide.
+
+
+Classic Dev Environment
+-----------------------
+
+Since before Docker existed, we have encouraged installing Dataverse and all its dependencies directly on your development machine, as described below. This can be thought of as the "classic" development environment for Dataverse.
+
+However, in 2023 we decided that we'd like to encourage all developers to start using Docker instead and opened https://github.com/IQSS/dataverse/issues/9616 to indicate that we plan to rewrite this page to recommend the use of Docker.
+
+There's nothing wrong with the classic instructions below and we don't plan to simply delete them. They are a valid alternative to running Dataverse in Docker. We will likely move them to another page.
+
+Set Up Dependencies
+-------------------
+
+Supported Operating Systems
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities.
+
+Windows is gaining support through Docker as described in the :doc:`windows` section.
+
+Install Java
+~~~~~~~~~~~~
+
+The Dataverse Software requires Java 17.
+
+We suggest downloading OpenJDK from https://adoptopenjdk.net
+
+On Linux, you are welcome to use the OpenJDK available from package managers.
+
+Install Netbeans or Maven
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+NetBeans IDE is recommended, and can be downloaded from https://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven.
+
+Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section.
+
+Install Homebrew (Mac Only)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+On Mac, install Homebrew to simplify the steps below: https://brew.sh
+
+Clone the Dataverse Software Git Repo
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Fork https://github.com/IQSS/dataverse and then clone your fork like this:
+
+``git clone git@github.com:[YOUR GITHUB USERNAME]/dataverse.git``
+
+Build the Dataverse Software War File
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you installed Netbeans, follow these steps:
+
+- Launch Netbeans and click "File" and then "Open Project". Navigate to where you put the Dataverse Software code and double-click "Dataverse" to open the project.
+- If you see "resolve project problems," go ahead and let Netbeans try to resolve them. This will probably including downloading dependencies, which can take a while.
+- Allow Netbeans to install nb-javac (required for Java 8 and below).
+- Select "Dataverse" under Projects and click "Run" in the menu and then "Build Project (Dataverse)". Check back for "BUILD SUCCESS" at the end.
+
+If you installed Maven instead of Netbeans, run ``mvn package``. Check for "BUILD SUCCESS" at the end.
+
+NOTE: Do you use a locale different than ``en_US.UTF-8`` on your development machine? Are you in a different timezone
+than Harvard (Eastern Time)? You might experience issues while running tests that were written with these settings
+in mind. The Maven ``pom.xml`` tries to handle this for you by setting the locale to ``en_US.UTF-8`` and the timezone
+to ``UTC``, but other, not yet discovered build or testing problems might lurk in the shadows.
+
+Install jq
+~~~~~~~~~~
+
+On Mac, run this command:
+
+``brew install jq``
+
+On Linux, install ``jq`` from your package manager or download a binary from https://stedolan.github.io/jq/
+
+Install Payara
+~~~~~~~~~~~~~~
+
+Payara 6.2023.8 or higher is required.
+
+To install Payara, run the following commands:
+
+``cd /usr/local``
+
+``sudo curl -O -L https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip``
+
+``sudo unzip payara-6.2023.8.zip``
+
+``sudo chown -R $USER /usr/local/payara6``
+
+If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/
+
+Install Service Dependencies Directly on localhost
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Install PostgreSQL
+^^^^^^^^^^^^^^^^^^
+
+The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required.
+
+On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password".
+
+After installation is complete, make a backup of the ``pg_hba.conf`` file like this:
+
+``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig``
+
+Then edit ``pg_hba.conf`` with an editor such as vi:
+
+``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf``
+
+In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password.
+
+In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled".
+
+Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password.
+
+On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL.
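+
+To confirm that "trust" authentication is working (a quick check that is not part of the original instructions), you should be able to connect without being prompted for a password:
+
+``psql -h localhost -U postgres -c 'SELECT version();'``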
+
+Install Solr
+^^^^^^^^^^^^
+
+`Solr <https://lucene.apache.org/solr/>`_ 9.3.0 is required.
+
+To install Solr, execute the following commands:
+
+``sudo mkdir /usr/local/solr``
+
+``sudo chown $USER /usr/local/solr``
+
+``cd /usr/local/solr``
+
+``curl -O https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz``
+
+``tar xvfz solr-9.3.0.tgz``
+
+``cd solr-9.3.0/server/solr``
+
+``cp -r configsets/_default collection1``
+
+``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema.xml``
+
+``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/schema_dv_mdb_fields.xml``
+
+``mv schema*.xml collection1/conf``
+
+``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/9.3.0/solrconfig.xml``
+
+``mv solrconfig.xml collection1/conf/solrconfig.xml``
+
+``cd /usr/local/solr/solr-9.3.0``
+
+(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.)
+
+``bin/solr start -j "-Djetty.host=127.0.0.1"``
+
+``bin/solr create_core -c collection1 -d server/solr/collection1/conf``
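+
+To verify that Solr is running and the core was created (a quick check, not part of the original instructions), you can query Solr's CoreAdmin API:
+
+``curl "http://localhost:8983/solr/admin/cores?action=STATUS"``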
+
+Install Service Dependencies Using Docker Compose
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+To avoid having to install service dependencies like PostgreSQL or Solr directly on your localhost, there is the alternative of using the ``docker-compose-dev.yml`` file available in the repository root. For this option you need to have Docker and Docker Compose installed on your machine.
+
+The ``docker-compose-dev.yml`` can be configured to only run the service dependencies necessary to support a Dataverse installation running directly on localhost. In addition to PostgreSQL and Solr, it also runs an SMTP server.
+
+Before running the Docker Compose file, you need to update the value of the ``DATAVERSE_DB_USER`` environment variable to ``postgres``. The variable can be found inside the ``.env`` file in the repository root. This step is required as the Dataverse installation script expects that database user.
+
+To run the Docker Compose file, go to the Dataverse repository root, then run:
+
+``docker-compose -f docker-compose-dev.yml up -d --scale dev_dataverse=0``
+
+Note that this command omits the Dataverse container defined in the Docker Compose file, since Dataverse is going to be installed directly on localhost in the next section.
+
+The command runs the containers in detached mode, but if you want to run them attached and thus view container logs in real time, remove the ``-d`` option from the command.
+
+Data volumes of each dependency will be persisted inside the ``docker-dev-volumes`` folder, inside the repository root.
+
+If you want to stop the containers, then run (for detached mode only, otherwise use ``Ctrl + C``):
+
+``docker-compose -f docker-compose-dev.yml stop``
+
+If you want to remove the containers, then run:
+
+``docker-compose -f docker-compose-dev.yml down``
+
+If you want to run a single container (the mail server, for example) then run:
+
+``docker-compose -f docker-compose-dev.yml up dev_smtp``
+
+For a fresh installation, and before running the Software Installer Script, it is recommended to delete the ``docker-dev-volumes`` folder to avoid installation problems due to existing data in the containers.
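+
+For example (run from the repository root, with the containers stopped or removed as shown above):
+
+``rm -rf docker-dev-volumes``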
+
+Run the Dataverse Software Installer Script
+-------------------------------------------
+
+Navigate to the directory where you cloned the Dataverse Software git repo and change directories to the ``scripts/installer`` directory like this:
+
+``cd scripts/installer``
+
+Create a Python virtual environment, activate it, then install dependencies:
+
+``python3 -m venv venv``
+
+``source venv/bin/activate``
+
+``pip install psycopg2-binary``
+
+The installer will try to connect to the SMTP server you tell it to use. If you haven't used the Docker Compose option for setting up the dependencies, or you don't have a mail server handy, you can run ``nc -l 25`` in another terminal and choose "localhost" (the default) to get past this check.
+
+Finally, run the installer (see also :download:`README_python.txt <../../../../scripts/installer/README_python.txt>` if necessary):
+
+``python3 install.py``
+
+Verify the Dataverse Software is Running
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+After the script has finished, you should be able to log into your Dataverse installation with the following credentials:
+
+- http://localhost:8080
+- username: dataverseAdmin
+- password: admin
+
+Configure Your Development Environment for Publishing
+-----------------------------------------------------
+
+Run the following command:
+
+``curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE``
+
+This will disable DOI registration by using a fake (in-code) DOI provider. Please note that this feature is only available in Dataverse Software 4.10+ and that at present, the UI will give no indication that the DOIs thus minted are fake.
+
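+To confirm the setting took effect, you can list the database settings (``jq`` was installed earlier; the exact response shape may vary by version) and, if you ever want to remove the setting again, delete it:
+
+``curl -s http://localhost:8080/api/admin/settings | jq '.data[":DoiProvider"]'``
+
+``curl -X DELETE http://localhost:8080/api/admin/settings/:DoiProvider``
+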
+Developers may also wish to consider using :ref:`PermaLinks <permalinks>`.
+
+Configure Your Development Environment for GUI Edits
+----------------------------------------------------
+
+Out of the box, a JSF setting is configured for production use and prevents edits to the GUI (xhtml files) from being visible unless you do a full deployment.
+
+It is recommended that you run the following command so that simply saving the xhtml file in Netbeans is enough for the change to show up.
+
+``asadmin create-system-properties "dataverse.jsf.refresh-period=1"``
+
+For more on JSF settings like this, see :ref:`jsf-config`.
+
+Next Steps
+----------
+
+If you can log in to the Dataverse installation, great! If not, please see the :doc:`troubleshooting` section. For further assistance, please see "Getting Help" in the :doc:`intro` section.
+
+You're almost ready to start hacking on code. Now that the installer script has you up and running, you need to continue on to the :doc:`tips` section to get set up to deploy code from your IDE or the command line.
+
+----
+
+Previous: :doc:`intro` | Next: :doc:`tips`
diff --git a/doc/sphinx-guides/source/developers/configuration.rst b/doc/sphinx-guides/source/developers/configuration.rst
index fb15fea7900..d342c28efc6 100644
--- a/doc/sphinx-guides/source/developers/configuration.rst
+++ b/doc/sphinx-guides/source/developers/configuration.rst
@@ -93,6 +93,7 @@ sub-scopes first.
 - All sub-scopes are below that.
 - Scopes are separated by dots (periods).
 - A scope may be a placeholder, filled with a variable during lookup. (Named object mapping.)
+- The setting should be in kebab case (``signing-secret``) rather than camel case (``signingSecret``).
 
 Any consumer of the setting can choose to use one of the fluent ``lookup()`` methods, which hides away alias handling,
 conversion etc from consuming code. See also the detailed Javadoc for these methods.
@@ -109,3 +110,17 @@ always like ``dataverse.<scope/....>.newname...=old.property.name``. Note this d
 aliases.
 
 Details can be found in ``edu.harvard.iq.dataverse.settings.source.AliasConfigSource``
+
+Adding a Feature Flag
+^^^^^^^^^^^^^^^^^^^^^
+
+Some parts of our codebase might be opt-in only. Experimental or optional feature previews can be switched on using our
+usual configuration mechanism, a JVM setting.
+
+Feature flags are implemented in the enumeration ``edu.harvard.iq.dataverse.settings.FeatureFlags``, which allows for
+convenient usage of it anywhere in the codebase. When adding a flag, please add it to the enum, think of a default
+status, add some Javadocs about the flagged feature and add a ``@since`` tag to make it easier to identify when a flag
+has been introduced.
+
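+A flag is then enabled at runtime like any other JVM setting. For example (a sketch; the flag name is hypothetical and assumes the ``dataverse.feature`` scope used for feature flags):
+
+``./asadmin create-jvm-options "-Ddataverse.feature.my-new-flag=true"``
+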
+We want to maintain a list of all :ref:`feature flags <feature-flags>` in the :ref:`configuration guide <feature-flags>`, so
+please add yours to the list.
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/developers/containers.rst b/doc/sphinx-guides/source/developers/containers.rst
index 63eff266a4f..175b178b455 100755
--- a/doc/sphinx-guides/source/developers/containers.rst
+++ b/doc/sphinx-guides/source/developers/containers.rst
@@ -2,17 +2,33 @@
 Docker, Kubernetes, and Containers
 ==================================
 
-The Dataverse Community is exploring the use of Docker, Kubernetes, and other container-related technologies. The primary community-lead projects to watch are:
+The Dataverse community is exploring the use of Docker, Kubernetes, and other container-related technologies.
 
-- https://github.com/IQSS/dataverse-kubernetes
-- https://github.com/IQSS/dataverse-docker
+.. contents:: |toctitle|
+	:local:
 
-The :doc:`testing` section mentions using Docker for integration tests.
+Container Guide
+---------------
 
-See also the :doc:`/container/index`.
+We recommend starting with the :doc:`/container/index`. The core Dataverse development team, with lots of help from the community, is iterating on containerizing the Dataverse software and its dependencies there.
 
-.. contents:: |toctitle|
-	:local:
+Help Containerize Dataverse
+---------------------------
+
+If you would like to contribute to the containerization effort, please consider joining the `Containerization Working Group <https://dataverse.org/working-groups#cwg>`_.
+
+Community-Led Projects
+-----------------------
+
+The primary community-led projects (which the core team is drawing inspiration from!) are:
+
+- https://github.com/IQSS/dataverse-docker
+- https://github.com/IQSS/dataverse-kubernetes (especially the https://github.com/EOSC-synergy/dataverse-kubernetes fork)
+
+Using Containers for Reproducible Research
+------------------------------------------
+
+Please see :ref:`research-code` in the User Guide for this related topic.
 
 ----
 
diff --git a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
index 52a6a283e9c..ded62288eb2 100644
--- a/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
+++ b/doc/sphinx-guides/source/developers/dataset-semantic-metadata-api.rst
@@ -36,6 +36,8 @@ To get the json-ld formatted metadata for a Dataset, specify the Dataset ID (DAT
 You should expect a 200 ("OK") response and JSON-LD mirroring the OAI-ORE representation in the returned 'data' object.
 
 
+.. _add-semantic-metadata:
+
 Add Dataset Metadata
 --------------------
 
diff --git a/doc/sphinx-guides/source/developers/debugging.rst b/doc/sphinx-guides/source/developers/debugging.rst
index 2088afe5521..50e8901b1ff 100644
--- a/doc/sphinx-guides/source/developers/debugging.rst
+++ b/doc/sphinx-guides/source/developers/debugging.rst
@@ -20,8 +20,8 @@ during development without recompiling. Changing the options will require at lea
 how you get these options in. (Variable substitution only happens during deployment and when using system properties
 or environment variables, you'll need to pass these into the domain, which usually will require an app server restart.)
 
-Please note that since Payara 5.2021.1 supporting MicroProfile Config 2.0, you can
-`use profiles <https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#configprofile>`_
+Please note you can use
+`MicroProfile Config <https://download.eclipse.org/microprofile/microprofile-config-2.0/microprofile-config-spec-2.0.html#configprofile>`_
 to maintain your settings more easily for different environments.
 
 .. list-table::
diff --git a/doc/sphinx-guides/source/developers/dependencies.rst b/doc/sphinx-guides/source/developers/dependencies.rst
index 65edfa3ffac..0208c49f90a 100644
--- a/doc/sphinx-guides/source/developers/dependencies.rst
+++ b/doc/sphinx-guides/source/developers/dependencies.rst
@@ -344,8 +344,7 @@ Repositories
 ------------
 
 Maven receives all dependencies from *repositories*. These can be public like `Maven Central <https://search.maven.org/>`_
-and others, but you can also use a private repository on premises or in the cloud. Last but not least, you can use
-local repositories, which can live next to your application code (see ``local_lib`` dir within the Dataverse Software codebase).
+and others, but you can also use a private repository on premises or in the cloud.
 
 Repositories are defined within the Dataverse Software POM like this:
 
@@ -364,11 +363,6 @@ Repositories are defined within the Dataverse Software POM like this:
             <url>http://repository.primefaces.org</url>
             <layout>default</layout>
         </repository>
-        <repository>
-            <id>dvn.private</id>
-            <name>Local repository for hosting jars not available from network repositories.</name>
-            <url>file://${project.basedir}/local_lib</url>
-        </repository>
     </repositories>
 
 You can also add repositories to your local Maven settings, see `docs <https://maven.apache.org/ref/3.6.0/maven-settings/settings.html>`_.
diff --git a/doc/sphinx-guides/source/developers/deployment.rst b/doc/sphinx-guides/source/developers/deployment.rst
index 84b821360be..045b0d0abbc 100755
--- a/doc/sphinx-guides/source/developers/deployment.rst
+++ b/doc/sphinx-guides/source/developers/deployment.rst
@@ -40,10 +40,10 @@ After all this, you can try the "version" command again.
 
 Note that it's possible to add an ``export`` line like the one above to your ``~/.bash_profile`` file so you don't have to run it yourself when you open a new terminal.
 
-Configure AWS CLI
-~~~~~~~~~~~~~~~~~
+Configure AWS CLI with Stored Credentials
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Next you need to configure AWS CLI.
+Dataverse can access S3 using credentials stored as described below, or using an IAM role described a little further below.
 
 Create a ``.aws`` directory in your home directory (which is called ``~``) like this:
 
@@ -70,6 +70,11 @@ Then update the file and replace the values for "aws_access_key_id" and "aws_sec
 
 If you are having trouble configuring the files manually as described above, see https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html which documents the ``aws configure`` command.
 
+Configure Role-Based S3 Access
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Amazon offers instructions on using an IAM role to grant permissions to applications running in EC2 at https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html
+
 Configure Ansible File (Optional)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index e44a70a405f..1301994cc82 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -2,214 +2,81 @@
 Development Environment
 =======================
 
-These instructions are purposefully opinionated and terse to help you get your development environment up and running as quickly as possible! Please note that familiarity with running commands from the terminal is assumed.
+These instructions are oriented around Docker but the "classic" instructions we used for Dataverse 4 and 5 are still available at :doc:`classic-dev-env`.
 
 .. contents:: |toctitle|
 	:local:
 
-Quick Start
------------
+.. _container-dev-quickstart:
 
-The quickest way to get the Dataverse Software running is to use Vagrant as described in the :doc:`tools` section, but for day to day development work, we recommended the following setup.
+Quickstart
+----------
+
+First, install Java 17, Maven, and Docker.
 
-Set Up Dependencies
--------------------
+After cloning the `dataverse repo <https://github.com/IQSS/dataverse>`_, run this:
 
-Supported Operating Systems
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+``mvn -Pct clean package docker:run``
 
-Mac OS X or Linux is required because the setup scripts assume the presence of standard Unix utilities.
+After some time you should be able to log in:
 
-Windows is not well supported, unfortunately, but Vagrant and Minishift environments are described in the :doc:`windows` section.
+- url: http://localhost:8080
+- username: dataverseAdmin
+- password: admin1
+
+Detailed Steps
+--------------
 
 Install Java
 ~~~~~~~~~~~~
 
-The Dataverse Software requires Java 11.
+The Dataverse Software requires Java 17.
 
-We suggest downloading OpenJDK from https://adoptopenjdk.net
+On Mac and Windows, we suggest downloading OpenJDK from https://adoptium.net (formerly `AdoptOpenJDK <https://adoptopenjdk.net>`_) or `SDKMAN <https://sdkman.io>`_.
 
 On Linux, you are welcome to use the OpenJDK available from package managers.
 
-Install Netbeans or Maven
-~~~~~~~~~~~~~~~~~~~~~~~~~
+Install Maven
+~~~~~~~~~~~~~
 
-NetBeans IDE is recommended, and can be downloaded from http://netbeans.org . Developers may use any editor or IDE. We recommend NetBeans because it is free, works cross platform, has good support for Jakarta EE projects, and includes a required build tool, Maven.
+Follow instructions at https://maven.apache.org
 
-Below we describe how to build the Dataverse Software war file with Netbeans but if you prefer to use only Maven, you can find installation instructions in the :doc:`tools` section.
+Install and Start Docker
+~~~~~~~~~~~~~~~~~~~~~~~~
 
-Install Homebrew (Mac Only)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Follow instructions at https://www.docker.com
 
-On Mac, install Homebrew to simplify the steps below: https://brew.sh
+Be sure to start Docker.
 
-Clone the Dataverse Software Git Repo
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Git Clone Repo
+~~~~~~~~~~~~~~
 
 Fork https://github.com/IQSS/dataverse and then clone your fork like this:
 
 ``git clone git@github.com:[YOUR GITHUB USERNAME]/dataverse.git``
 
-Build the Dataverse Software War File
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you installed Netbeans, follow these steps:
-
-- Launch Netbeans and click "File" and then "Open Project". Navigate to where you put the Dataverse Software code and double-click "Dataverse" to open the project.
-- If you see "resolve project problems," go ahead and let Netbeans try to resolve them. This will probably including downloading dependencies, which can take a while.
-- Allow Netbeans to install nb-javac (required for Java 8 and below).
-- Select "Dataverse" under Projects and click "Run" in the menu and then "Build Project (Dataverse)". Check back for "BUILD SUCCESS" at the end.
-
-If you installed Maven instead of Netbeans, run ``mvn package``. Check for "BUILD SUCCESS" at the end.
-
-NOTE: Do you use a locale different than ``en_US.UTF-8`` on your development machine? Are you in a different timezone
-than Harvard (Eastern Time)? You might experience issues while running tests that were written with these settings
-in mind. The Maven  ``pom.xml`` tries to handle this for you by setting the locale to ``en_US.UTF-8`` and timezone
-``UTC``, but more, not yet discovered building or testing problems might lurk in the shadows.
-
-Install jq
-~~~~~~~~~~
-
-On Mac, run this command:
-
-``brew install jq``
-
-On Linux, install ``jq`` from your package manager or download a binary from http://stedolan.github.io/jq/
-
-Install Payara
-~~~~~~~~~~~~~~
-
-Payara 5.2022.3 or higher is required.
-
-To install Payara, run the following commands:
-
-``cd /usr/local``
-
-``sudo curl -O -L https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip``
-
-``sudo unzip payara-5.2022.3.zip``
-
-``sudo chown -R $USER /usr/local/payara5``
-
-Install PostgreSQL
-~~~~~~~~~~~~~~~~~~
-
-The Dataverse Software has been tested with PostgreSQL versions up to 13. PostgreSQL version 10+ is required. 
-
-On Mac, go to https://www.postgresql.org/download/macosx/ and choose "Interactive installer by EDB" option. Note that version 13.5 is used in the command line examples below, but the process should be similar for other versions. When prompted to set a password for the "database superuser (postgres)" just enter "password".
-
-After installation is complete, make a backup of the ``pg_hba.conf`` file like this:
-
-``sudo cp /Library/PostgreSQL/13/data/pg_hba.conf /Library/PostgreSQL/13/data/pg_hba.conf.orig``
-
-Then edit ``pg_hba.conf`` with an editor such as vi:
-
-``sudo vi /Library/PostgreSQL/13/data/pg_hba.conf``
-
-In the "METHOD" column, change all instances of "scram-sha-256" (or whatever is in that column) to "trust". This will make it so PostgreSQL doesn't require a password.
-
-In the Finder, click "Applications" then "PostgreSQL 13" and launch the "Reload Configuration" app. Click "OK" after you see "server signaled".
-
-Next, to confirm the edit worked, launch the "pgAdmin" application from the same folder. Under "Browser", expand "Servers" and double click "PostgreSQL 13". When you are prompted for a password, leave it blank and click "OK". If you have successfully edited "pg_hba.conf", you can get in without a password.
-
-On Linux, you should just install PostgreSQL using your favorite package manager, such as ``yum``. (Consult the PostgreSQL section of :doc:`/installation/prerequisites` in the main Installation guide for more info and command line examples). Find ``pg_hba.conf`` and set the authentication method to "trust" and restart PostgreSQL.
-
-Install Solr
-~~~~~~~~~~~~
-
-`Solr <http://lucene.apache.org/solr/>`_ 8.11.1 is required.
-
-To install Solr, execute the following commands:
+Build and Run
+~~~~~~~~~~~~~
 
-``sudo mkdir /usr/local/solr``
+Change into the ``dataverse`` directory you just cloned and run the following command:
 
-``sudo chown $USER /usr/local/solr``
+``mvn -Pct clean package docker:run``
 
-``cd /usr/local/solr``
+Verify 
+~~~~~~
 
-``curl -O http://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz``
+After some time you should be able to log in:
 
-``tar xvfz solr-8.11.1.tgz``
-
-``cd solr-8.11.1/server/solr``
-
-``cp -r configsets/_default collection1``
-
-``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema.xml``
-
-``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/schema_dv_mdb_fields.xml``
-
-``mv schema*.xml collection1/conf``
-
-``curl -O https://raw.githubusercontent.com/IQSS/dataverse/develop/conf/solr/8.11.1/solrconfig.xml``
-
-``mv solrconfig.xml collection1/conf/solrconfig.xml``
-
-``cd /usr/local/solr/solr-8.11.1``
-
-(Please note that the extra jetty argument below is a security measure to limit connections to Solr to only your computer. For extra security, run a firewall.)
-
-``bin/solr start -j "-Djetty.host=127.0.0.1"``
-
-``bin/solr create_core -c collection1 -d server/solr/collection1/conf``
-
-Run the Dataverse Software Installer Script
--------------------------------------------
-
-Navigate to the directory where you cloned the Dataverse Software git repo change directories to the ``scripts/installer`` directory like this:
-
-``cd scripts/installer``
-
-Create a Python virtual environment, activate it, then install dependencies:
-
-``python3 -m venv venv``
-
-``source venv/bin/activate``
-
-``pip install psycopg2-binary``
-
-The installer will try to connect to the SMTP server you tell it to use. If you don't have a mail server handy you can run ``nc -l 25`` in another terminal and choose "localhost" (the default) to get past this check.
-
-Finally, run the installer (see also :download:`README_python.txt <../../../../scripts/installer/README_python.txt>` if necessary):
-
-``python3 install.py``
-
-Verify the Dataverse Software is Running
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-After the script has finished, you should be able to log into your Dataverse installation with the following credentials:
-
-- http://localhost:8080
+- url: http://localhost:8080
 - username: dataverseAdmin
-- password: admin
-
-Configure Your Development Environment for Publishing
------------------------------------------------------
-
-Run the following command:
-
-``curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE``
-
-This will disable DOI registration by using a fake (in-code) DOI provider. Please note that this feature is only available in Dataverse Software 4.10+ and that at present, the UI will give no indication that the DOIs thus minted are fake.
-
-Configure Your Development Environment for GUI Edits
-----------------------------------------------------
-
-Out of the box, a JSF setting is configured for production use and prevents edits to the GUI (xhtml files) from being visible unless you do a full deployment.
-
-It is recommended that you run the following command so that simply saving the xhtml file in Netbeans is enough for the change to show up.
-
-``asadmin create-system-properties "dataverse.jsf.refresh-period=1"``
-
-For more on JSF settings like this, see :ref:`jsf-config`.
-
-Next Steps
-----------
+- password: admin1
 
-If you can log in to the Dataverse installation, great! If not, please see the :doc:`troubleshooting` section. For further assistance, please see "Getting Help" in the :doc:`intro` section.
+More Information
+----------------
 
-You're almost ready to start hacking on code. Now that the installer script has you up and running, you need to continue on to the :doc:`tips` section to get set up to deploy code from your IDE or the command line.
+See also the :doc:`/container/dev-usage` section of the Container Guide.
 
-----
+Getting Help
+------------
 
-Previous: :doc:`intro` | Next: :doc:`tips`
+Please feel free to reach out at https://chat.dataverse.org or https://groups.google.com/g/dataverse-dev if you have any difficulty setting up a dev environment!
diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst
index c89ed6e3b75..d07b5b63f72 100755
--- a/doc/sphinx-guides/source/developers/documentation.rst
+++ b/doc/sphinx-guides/source/developers/documentation.rst
@@ -8,7 +8,7 @@ Writing Documentation
 Quick Fix
 -----------
 
-If you find a typo or a small error in the documentation you can fix it using GitHub's online web editor. Generally speaking, we will be following https://help.github.com/en/articles/editing-files-in-another-users-repository
+If you find a typo or a small error in the documentation you can fix it using GitHub's online web editor. Generally speaking, we will be following https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files#editing-files-in-another-users-repository
 
 - Navigate to https://github.com/IQSS/dataverse/tree/develop/doc/sphinx-guides/source where you will see folders for each of the guides: `admin`_, `api`_, `developers`_, `installation`_, `style`_, `user`_.
 - Find the file you want to edit under one of the folders above.
@@ -36,7 +36,9 @@ If you would like to read more about the Dataverse Project's use of GitHub, plea
 Building the Guides with Sphinx
 -------------------------------
 
-The Dataverse guides are written using Sphinx (http://sphinx-doc.org). We recommend installing Sphinx and building the guides locally so you can get an accurate preview of your changes.
+The Dataverse guides are written using Sphinx (https://sphinx-doc.org). We recommend installing Sphinx on your localhost or using a Sphinx Docker container to build the guides locally so you can get an accurate preview of your changes.
+
+In case you decide to use a Sphinx Docker container to build the guides, you can skip the next two installation sections, but you will need to have Docker installed.
 
 Installing Sphinx
 ~~~~~~~~~~~~~~~~~
@@ -60,7 +62,7 @@ In some parts of the documentation, graphs are rendered as images using the Sphi
 
 Building the guides requires the ``dot`` executable from GraphViz.
 
-This requires having `GraphViz <http://graphviz.org>`_ installed and either having ``dot`` on the path or
+This requires having `GraphViz <https://graphviz.org>`_ installed and either having ``dot`` on the path or
 `adding options to the make call <https://groups.google.com/forum/#!topic/sphinx-users/yXgNey_0M3I>`_.
 
 Editing and Building the Guides
@@ -69,10 +71,15 @@ Editing and Building the Guides
 To edit the existing documentation:
 
 - Create a branch (see :ref:`how-to-make-a-pull-request`).
-- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to http://guides.dataverse.org.
+- In ``doc/sphinx-guides/source`` you will find the .rst files that correspond to https://guides.dataverse.org.
 - Using your preferred text editor, open and edit the necessary files, or create new ones.
 
-Once you are done, open a terminal, change directories to ``doc/sphinx-guides``, activate (or reactivate) your Python virtual environment, and build the guides.
+Once you are done, you can preview the changes by building the guides locally. As explained, you can build the guides with Sphinx locally installed, or with a Docker container.
+
+Building the Guides with Sphinx Locally Installed
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Open a terminal, change directories to ``doc/sphinx-guides``, activate (or reactivate) your Python virtual environment, and build the guides.
 
 ``cd doc/sphinx-guides``
 
@@ -82,6 +89,16 @@ Once you are done, open a terminal, change directories to ``doc/sphinx-guides``,
 
 ``make html``
 
+Building the Guides with a Sphinx Docker Container
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you want to build the guides using a Docker container, execute the following command in the repository root:
+
+``docker run -it --rm -v $(pwd):/docs sphinxdoc/sphinx:3.5.4 bash -c "cd doc/sphinx-guides && pip3 install -r requirements.txt && make html"``
+
+Previewing the Guides
+^^^^^^^^^^^^^^^^^^^^^
+
 After Sphinx is done processing the files you should notice that the ``html`` folder in ``doc/sphinx-guides/build`` directory has been updated.
 You can click on the files in the ``html`` folder to preview the changes.
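+
+If you prefer to browse the built guides over HTTP rather than opening the files directly (an optional convenience, not part of the original instructions), Python's built-in web server works well from the ``doc/sphinx-guides`` directory:
+
+``cd build/html && python3 -m http.server``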
 
@@ -124,6 +141,25 @@ In order to make it clear to the crawlers that we only want the latest version d
         Allow: /en/latest/
         Disallow: /en/
 
+PDF Version of the Guides
+-------------------------
+
+The HTML version of the guides is the official one. Any other formats are maintained on a best effort basis.
+
+If you would like to build a PDF version of the guides and have Docker installed, please try the command below from the root of the git repo:
+
+``docker run -it --rm -v $(pwd):/docs sphinxdoc/sphinx-latexpdf:3.5.4 bash -c "cd doc/sphinx-guides && pip3 install -r requirements.txt && make latexpdf LATEXMKOPTS=\"-interaction=nonstopmode\"; cd ../.. && ls -1 doc/sphinx-guides/build/latex/Dataverse.pdf"``
+
+A few notes about the command above:
+
+- Hopefully the PDF was created at ``doc/sphinx-guides/build/latex/Dataverse.pdf``.
+- For now, we are using "nonstopmode" but this masks some errors.
+- See requirements.txt for a note regarding the version of Sphinx we are using.
+
+Also, as of this writing we have enabled PDF builds from the "develop" branch. You can download the PDF from http://preview.guides.gdcc.io/_/downloads/en/develop/pdf/
+
+If you would like to help improve the PDF version of the guides, please get in touch! Please see :ref:`getting-help-developers` for ways to contact the developer community.
+
 ----
 
 Previous: :doc:`testing` | Next: :doc:`dependencies`
diff --git a/doc/sphinx-guides/source/developers/globus-api.rst b/doc/sphinx-guides/source/developers/globus-api.rst
new file mode 100644
index 00000000000..de9df06a798
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/globus-api.rst
@@ -0,0 +1,235 @@
+Globus Transfer API
+===================
+
+The Globus API addresses three use cases:
+
+* Transfer to a Dataverse-managed Globus endpoint (File-based or using the Globus S3 Connector)
+* Reference of files that will remain in a remote Globus endpoint
+* Transfer from a Dataverse-managed Globus endpoint
+
+The ability for Dataverse to interact with Globus endpoints is configured via a Globus store - see :ref:`globus-storage`.
+
+Globus transfers (or referencing a remote endpoint) for upload and download involve a series of steps. These can be accomplished using the Dataverse and Globus APIs. (These are used internally by the `dataverse-globus app <https://github.com/scholarsportal/dataverse-globus>`_ when transfers are done via the Dataverse UI.)
+
+Requesting Upload or Download Parameters
+----------------------------------------
+
+The first step in preparing for a Globus transfer/reference operation is to request the parameters relevant for a given dataset:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/globusUploadParameters?locale=$LOCALE"
+
+The response will be of the form:
+
+.. code-block:: bash
+
+  {
+          "status": "OK",
+          "data": {
+              "queryParameters": {
+                  "datasetId": 29,
+                  "siteUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com",
+                  "datasetVersion": ":draft",
+                  "dvLocale": "en",
+                  "datasetPid": "doi:10.5072/FK2/ILLPXE",
+                  "managed": "true",
+                  "endpoint": "d8c42580-6528-4605-9ad8-116a61982644"
+              },
+              "signedUrls": [
+                  {
+                      "name": "requestGlobusTransferPaths",
+                      "httpMethod": "POST",
+                      "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/requestGlobusUploadPaths?until=2023-11-22T01:52:03.648&user=dataverseAdmin&method=POST&token=63ac4bb748d12078dded1074916508e19e6f6b61f64294d38e0b528010b07d48783cf2e975d7a1cb6d4a3c535f209b981c7c6858bc63afdfc0f8ecc8a139b44a",
+                      "timeOut": 300
+                  },
+                  {
+                      "name": "addGlobusFiles",
+                      "httpMethod": "POST",
+                      "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/addGlobusFiles?until=2023-11-22T01:52:03.648&user=dataverseAdmin&method=POST&token=2aaa03f6b9f851a72e112acf584ffc0758ed0cc8d749c5a6f8c20494bb7bc13197ab123e1933f3dde2711f13b347c05e6cec1809a8f0b5484982570198564025",
+                      "timeOut": 300
+                  },
+                  {
+                      "name": "getDatasetMetadata",
+                      "httpMethod": "GET",
+                      "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/versions/:draft?until=2023-11-22T01:52:03.649&user=dataverseAdmin&method=GET&token=1878d6a829cd5540e89c07bdaf647f1bea5314cc7a55433b0b506350dd330cad61ade3714a8ee199a7b464fb3b8cddaea0f32a89ac3bfc4a86cd2ea3004ecbb8",
+                      "timeOut": 300
+                  },
+                  {
+                      "name": "getFileListing",
+                      "httpMethod": "GET",
+                      "signedUrl": "http://ec2-34-204-169-194.compute-1.amazonaws.com/api/v1/datasets/29/versions/:draft/files?until=2023-11-22T01:52:03.650&user=dataverseAdmin&method=GET&token=78e8ca8321624f42602af659227998374ef3788d0feb43d696a0e19086e0f2b3b66b96981903a1565e836416c504b6248cd3c6f7c2644566979bd16e23a99622",
+                      "timeOut": 300
+                  }
+              ]
+          }
+    }
+
+The response includes the id for the Globus endpoint to use along with several signed URLs.
+
+The getDatasetMetadata and getFileListing URLs are just signed versions of the standard Dataset metadata and file listing API calls. The other two are Globus specific.
+
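+For example, one way (a sketch, assuming ``jq`` is available) to pull a specific signed URL out of the response:
+
+.. code-block:: bash
+
+  curl -s -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/globusUploadParameters?persistentId=$PERSISTENT_IDENTIFIER&locale=$LOCALE" \
+    | jq -r '.data.signedUrls[] | select(.name=="requestGlobusTransferPaths") | .signedUrl'
+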
+If called for a dataset using a store that is configured with one or more remote Globus endpoints, the response is similar, but the "managed" parameter will be false, the "endpoint" parameter is
+replaced with a JSON array of "referenceEndpointsWithPaths", and the requestGlobusTransferPaths and addGlobusFiles URLs are replaced with ones for requestGlobusReferencePaths and addFiles.
+All of these calls are described further below.
+
+The call to set up for a transfer out (download) is similar:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/globusDownloadParameters?locale=$LOCALE"
+
+Note that this API call supports an additional downloadId query parameter. This is only used when the dataverse-globus app is called from the Dataverse user interface. There is no need to use it when calling the API directly.
+
+The returned response includes the same getDatasetMetadata and getFileListing URLs as in the upload case and includes "monitorGlobusDownload" and "requestGlobusDownload" URLs. The response will also indicate whether the store is "managed" and will provide the "endpoint" from which downloads can be made.
+
+
+Performing an Upload/Transfer In
+--------------------------------
+
+The information from the API call above can be used to provide a user with information about the dataset and to prepare to transfer or to reference files (based on the "managed" parameter). 
+
+Once the user identifies which files are to be added, the requestGlobusTransferPaths or requestGlobusReferencePaths URLs can be called. These both reference the same API call but must be used with different entries in the JSON body sent:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+  export LOCALE=en-US
+ 
+  curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusUpload"
+
+Note that when using the dataverse-globus app or the return from the previous call, the URL for this call will be signed and no API_TOKEN is needed. 
+  
+In the managed case, the JSON body sent must include the id of the Globus user that will perform the transfer and the number of files that will be transferred:
+
+.. code-block:: bash
+
+  {
+    "principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75", 
+    "numberOfFiles":2
+  }
+
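+For example, a complete managed-case request (a sketch combining the call and body shown above; the principal value is the placeholder from the example) might look like:
+
+.. code-block:: bash
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST \
+    "$SERVER_URL/api/datasets/:persistentId/requestGlobusUpload?persistentId=$PERSISTENT_IDENTIFIER" \
+    -d '{"principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75", "numberOfFiles":2}'
+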
+In the remote reference case, the JSON body sent must include the Globus endpoint/paths that will be referenced:
+
+.. code-block:: bash
+
+  {
+    "referencedFiles":[
+      "d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt"
+    ]
+  }
+    
+The response will include a JSON object. In the managed case, the map is from newly assigned file storageIdentifiers to specific paths on the managed Globus endpoint:
+
+.. code-block:: bash
+
+  {
+    "status":"OK",
+    "data":{
+      "globusm://18b49d3688c-62137dcb06e4":"/hdc1/10.5072/FK2/ILLPXE/18b49d3688c-62137dcb06e4",
+      "globusm://18b49d3688c-5c17d575e820":"/hdc1/10.5072/FK2/ILLPXE/18b49d3688c-5c17d575e820"
+    }
+  }
+
+In the managed case, the specified Globus principal is granted write permission to the specified endpoint/path,
+which will allow initiation of a transfer from the external endpoint to the managed endpoint using the Globus API.
+The permission will be revoked if the transfer is not started and the next call to Dataverse to finish the transfer is not made within a short time (configurable, default of 5 minutes).
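+
+For illustration, a minimal sketch of initiating such a transfer with the Globus CLI (assuming the CLI is installed and logged in as the principal above; the endpoint IDs and source path are placeholders, and the destination path is taken from the mapping returned above):
+
+.. code-block:: bash
+
+  # Transfer one file from the user's own endpoint to the path assigned by Dataverse.
+  # The task ID this prints is the taskIdentifier passed to the addGlobusFiles call described below.
+  globus transfer "$SOURCE_ENDPOINT_ID:/home/user/file1.txt" \
+    "$MANAGED_ENDPOINT_ID:/hdc1/10.5072/FK2/ILLPXE/18b49d3688c-62137dcb06e4"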
+ 
+In the remote/reference case, the map is from the initially supplied endpoint/paths to the newly assigned file storage identifiers:
+
+.. code-block:: bash
+
+  {
+    "status":"OK",
+    "data":{
+      "d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt":"globus://18bf8c933f4-ed2661e7d19b//d8c42580-6528-4605-9ad8-116a61982644/hdc1/test1.txt"
+    }
+  }
+
+
+
+Adding Files to the Dataset
+---------------------------
+
+In the managed case, once a Globus transfer has been initiated, a final API call is made to Dataverse to provide it with the task identifier of the transfer and information about the files being transferred:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+  export JSON_DATA='{"taskIdentifier":"3f530302-6c48-11ee-8428-378be0d9c521", \
+                    "files": [{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b3972213f-f6b5c2221423", "fileName":"file1.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "1234"}}, \
+                    {"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false", "storageIdentifier":"globusm://18b39722140-50eb7d3c5ece", "fileName":"file2.txt", "mimeType":"text/plain", "checksum": {"@type": "MD5", "@value": "2345"}}]}'
+
+  curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:multipart/form-data" -X POST "$SERVER_URL/api/datasets/:persistentId/addGlobusFiles -F "jsonData=$JSON_DATA"
+
+Note that the content type is multipart/form-data, matching the /addFiles API call. Also note that the API_TOKEN is not needed when using a signed URL.
+
+With this information, Dataverse will begin to monitor the transfer and when it completes, will add all files for which the transfer succeeded.
+As the transfer can take significant time and the API call is asynchronous, the only way to determine if the transfer succeeded via API is to use the standard calls to check the dataset lock state and contents.
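+
+For example, one way to poll the lock state is the standard dataset locks call (a sketch; see the Native API guide for the exact options):
+
+.. code-block:: bash
+
+  # The dataset remains locked while the Globus transfer is being processed.
+  # An empty "data" array in the response means the lock has been released.
+  curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/locks?persistentId=$PERSISTENT_IDENTIFIER"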
+
+Once the transfer completes, Dataverse will remove the write permission for the principal.
+
+Note that when using a managed endpoint that uses the Globus S3 Connector, the checksum should be correct as Dataverse can validate it. For file-based endpoints, the checksum should be included if available but Dataverse cannot verify it.
+
+In the remote/reference case, where there is no transfer to monitor, the standard /addFiles API call (see :ref:`direct-add-to-dataset-api`) is used instead. There are no changes for the Globus case.
+
+Downloading/Transfer Out Via Globus
+-----------------------------------
+
+To begin downloading files, the requestGlobusDownload URL is used:
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+  
+  curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/requestGlobusDownload"
+
+The JSON body sent should include a list of file ids to download and, for a managed endpoint, the Globus principal that will make the transfer:
+
+.. code-block:: bash
+
+  {
+    "principal":"d15d4244-fc10-47f3-a790-85bdb6db9a75", 
+    "fileIds":[60, 61]
+  }
+  
+Note that this API call takes an optional downloadId parameter that is used with the dataverse-globus app. When downloadId is included, the list of fileIds is not needed.
+
+The response is a JSON object mapping the requested file Ids to Globus endpoint/paths. In the managed case, the principal will have been given read permissions for the specified paths:
+
+.. code-block:: bash
+
+  {
+    "status":"OK",
+    "data":{
+      "60": "d8c42580-6528-4605-9ad8-116a61982644/hdc1/10.5072/FK2/ILLPXE/18bf3af9c78-92b8e168090e",
+     "61": "d8c42580-6528-4605-9ad8-116a61982644/hdc1/10.5072/FK2/ILLPXE/18bf3af9c78-c8d81569305c"
+    }
+  }
+
+For the remote case, the user can perform the transfer without further contact with Dataverse. In the managed case, the user must initiate the transfer via the Globus API and then inform Dataverse via the monitorGlobusDownload URL.
+Dataverse will then monitor the transfer and revoke the read permission when the transfer is complete. (Not making this last call could result in failure of the transfer.)
+
+.. code-block:: bash
+
+  export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+  export SERVER_URL=https://demo.dataverse.org
+  export PERSISTENT_IDENTIFIER=doi:10.5072/FK27U7YBV
+  
+  curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/datasets/:persistentId/monitorGlobusDownload"
+  
+The JSON body sent just contains the task identifier for the transfer:
+
+.. code-block:: bash
+
+  {
+    "taskIdentifier":"b5fd01aa-8963-11ee-83ae-d5484943e99a"
+  }
+ 
+
diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst
index 6f93cf75d51..25fea138736 100755
--- a/doc/sphinx-guides/source/developers/index.rst
+++ b/doc/sphinx-guides/source/developers/index.rst
@@ -4,7 +4,7 @@
    contain the root `toctree` directive.
 
 Developer Guide
-=======================================================
+===============
 
 **Contents:**
 
@@ -19,7 +19,9 @@ Developer Guide
    sql-upgrade-scripts
    testing
    documentation
+   api-design
    security
+   performance
    dependencies
    debugging
    coding-style
@@ -27,6 +29,8 @@ Developer Guide
    deployment
    containers
    making-releases
+   making-library-releases
+   metadataexport
    tools
    unf/index
    make-data-count
@@ -36,8 +40,10 @@ Developer Guide
    big-data-support
    aux-file-support
    s3-direct-upload-api
+   globus-api
    dataset-semantic-metadata-api
    dataset-migration-api 
    workflows
    fontcustom
+   classic-dev-env
    
diff --git a/doc/sphinx-guides/source/developers/intro.rst b/doc/sphinx-guides/source/developers/intro.rst
index 7f4e8c1ba34..a01a8066897 100755
--- a/doc/sphinx-guides/source/developers/intro.rst
+++ b/doc/sphinx-guides/source/developers/intro.rst
@@ -2,7 +2,7 @@
 Introduction
 ============
 
-Welcome! `The Dataverse Project <http://dataverse.org>`_ is an `open source <https://github.com/IQSS/dataverse/blob/master/LICENSE.md>`_ project that loves `contributors <https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md>`_!
+Welcome! `The Dataverse Project <https://dataverse.org>`_ is an `open source <https://github.com/IQSS/dataverse/blob/master/LICENSE.md>`_ project that loves `contributors <https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md>`_!
 
 .. contents:: |toctitle|
 	:local:
@@ -19,7 +19,7 @@ To get started, you'll want to set up your :doc:`dev-environment` and make sure
 Getting Help
 ------------
 
-If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as http://chat.dataverse.org, the `dataverse-dev <https://groups.google.com/forum/#!forum/dataverse-dev>`_ mailing list, `community calls <https://dataverse.org/community-calls>`_, or support@dataverse.org.
+If you have any questions at all, please reach out to other developers via the channels listed in https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md such as https://chat.dataverse.org, the `dataverse-dev <https://groups.google.com/forum/#!forum/dataverse-dev>`_ mailing list, `community calls <https://dataverse.org/community-calls>`_, or support@dataverse.org.
 
 .. _core-technologies:
 
@@ -52,7 +52,9 @@ Related Guides
 
 If you are a developer who wants to make use of the Dataverse Software APIs, please see the :doc:`/api/index`. If you have front-end UI questions, please see the :doc:`/style/index`.
 
-If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`. We validate the installation scripts with :doc:`/developers/tools` such as `Vagrant <http://vagrantup.com>`_ and Docker (see the :doc:`containers` section).
+If you are a sysadmin who likes to code, you may be interested in hacking on installation scripts mentioned in the :doc:`/installation/index`.
+
+If you are a Docker enthusiast, please check out the :doc:`/container/index`.
 
 Related Projects
 ----------------
diff --git a/doc/sphinx-guides/source/developers/make-data-count.rst b/doc/sphinx-guides/source/developers/make-data-count.rst
index a3c0d10dc5e..8eaa5c0d7f8 100644
--- a/doc/sphinx-guides/source/developers/make-data-count.rst
+++ b/doc/sphinx-guides/source/developers/make-data-count.rst
@@ -30,15 +30,13 @@ Full Setup
 
 The recommended way to work on the Make Data Count feature is to spin up an EC2 instance that has both the Dataverse Software and Counter Processor installed. Go to the :doc:`deployment` page for details on how to spin up an EC2 instance and make sure that your Ansible file is configured to install Counter Processor before running the "create" script.
 
-(Alternatively, you can try installing Counter Processor in Vagrant. :download:`setup-counter-processor.sh <../../../../scripts/vagrant/setup-counter-processor.sh>` might help you get it installed.)
-
 After you have spun to your EC2 instance, set ``:MDCLogPath`` so that the Dataverse installation creates a log for Counter Processor to operate on. For more on this database setting, see the :doc:`/installation/config` section of the Installation Guide.
 
 Next you need to have the Dataverse installation add some entries to the log that Counter Processor will operate on. To do this, click on some published datasets and download some files.
 
-Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../../../../scripts/vagrant/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings:
+Next you should run Counter Processor to convert the log into a SUSHI report, which is in JSON format. Before running Counter Processor, you need to put a configuration file into place. As a starting point use :download:`counter-processor-config.yaml <../_static/developers/counter-processor-config.yaml>` and edit the file, paying particular attention to the following settings:
 
-- ``log_name_pattern`` You might want something like ``/usr/local/payara5/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log``
+- ``log_name_pattern`` You might want something like ``/usr/local/payara6/glassfish/domains/domain1/logs/counter_(yyyy-mm-dd).log``
 - ``year_month`` You should probably set this to the current month.
 - ``output_file`` This needs to be a directory that the "dataverse" Unix user can read but that the "counter" user can write to. In dev, you can probably get away with "/tmp" as the directory.
 - ``platform`` Out of the box from Counter Processor this is set to ``Dash`` but this should be changed to match the name of your Dataverse installation. Examples are "Harvard Dataverse Repository" for Harvard University or "LibraData" for the University of Virginia.
diff --git a/doc/sphinx-guides/source/developers/making-library-releases.rst b/doc/sphinx-guides/source/developers/making-library-releases.rst
new file mode 100755
index 00000000000..63b6eeb1c2a
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/making-library-releases.rst
@@ -0,0 +1,93 @@
+=======================
+Making Library Releases
+=======================
+
+.. contents:: |toctitle|
+	:local:
+
+Introduction
+------------
+
+Note: See :doc:`making-releases` for Dataverse itself.
+
+We release Java libraries to Maven Central that are used by Dataverse (and perhaps `other <https://github.com/gdcc/xoai/issues/141>`_ `software <https://github.com/gdcc/xoai/issues/170>`_!):
+
+- https://central.sonatype.com/namespace/org.dataverse
+- https://central.sonatype.com/namespace/io.gdcc
+
+We release JavaScript/TypeScript libraries to npm:
+
+- https://www.npmjs.com/package/@iqss/dataverse-design-system
+
+Maven Central (Java)
+--------------------
+
+From the perspective of Maven Central, we are both `producers <https://central.sonatype.org/publish/>`_ because we publish/release libraries there and `consumers <https://central.sonatype.org/consume/>`_ because we pull down those libraries (and many others) when we build Dataverse.
+
+Releasing Existing Libraries to Maven Central
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you need to release an existing library, all the setup should be done already. The steps below assume that GitHub Actions are in place to do the heavy lifting for you, such as signing artifacts with GPG.
+
+Releasing a Snapshot Version to Maven Central
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+`Snapshot <https://maven.apache.org/guides/getting-started/index.html#what-is-a-snapshot-version>`_ releases are published automatically through GitHub Actions (e.g. through a `snapshot workflow <https://github.com/gdcc/sword2-server/blob/main/.github/workflows/maven-snapshot.yml>`_ for the SWORD library) every time a pull request is merged (or the default branch, typically ``main``, is otherwise updated).
+
+That is to say, to make a snapshot release, you only need to get one or more commits into the default branch.
+
+Releasing a Release (Non-Snapshot) Version to Maven Central
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+From a pom.xml it may not be apparent that snapshots like ``6.0-SNAPSHOT`` might be changing under your feet. Browsing the snapshot repository (e.g. our `UNF 6.0-SNAPSHOT <https://s01.oss.sonatype.org/content/groups/staging/org/dataverse/unf/6.0-SNAPSHOT/>`_) may reveal versions changing over time. To finalize the code and stop it from changing, we publish/release what Maven calls a "`release version <https://maven.apache.org/guides/getting-started/index.html#what-is-a-snapshot-version>`_". This will remove ``-SNAPSHOT`` from the version (through an ``mvn`` command).
+
+Non-snapshot releases (`release <https://maven.apache.org/guides/getting-started/index.html#what-is-a-snapshot-version>`_ versions) are published automatically through GitHub Actions (e.g. through a `release workflow <https://github.com/gdcc/sword2-server/blob/main/.github/workflows/maven-release.yml>`_), kicked off locally by an ``mvn`` command that invokes the `Maven Release Plugin <https://maven.apache.org/maven-release/maven-release-plugin/>`_.
+
+First, run a clean:
+
+``mvn release:clean``
+
+Then run a prepare:
+
+``mvn release:prepare``
+
+The prepare step is interactive. You will be prompted for the following information:
+
+- the release version (e.g. `2.0.0 <https://repo.maven.apache.org/maven2/io/gdcc/sword2-server/2.0.0/>`_)
+- the git tag to create and push (e.g. `sword2-server-2.0.0 <https://github.com/gdcc/sword2-server/releases/tag/sword2-server-2.0.0>`_)
+- the next development (snapshot) version (e.g. `2.0.1-SNAPSHOT <https://s01.oss.sonatype.org/#nexus-search;checksum~47575aed5471adeb0a08a02098ce3a23a5778afb>`_)
+
+These examples are from the SWORD library. Below is what to expect from the interactive session. In many cases, you can just hit enter to accept the defaults.
+
+.. code-block:: bash
+
+        [INFO] 5/17 prepare:map-release-versions
+        What is the release version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.0: :
+        [INFO] 6/17 prepare:input-variables
+        What is the SCM release tag or label for "SWORD v2 Common Server Library (forked)"? (sword2-server) sword2-server-2.0.0: :
+        [INFO] 7/17 prepare:map-development-versions
+        What is the new development version for "SWORD v2 Common Server Library (forked)"? (sword2-server) 2.0.1-SNAPSHOT: :
+        [INFO] 8/17 prepare:rewrite-poms-for-release
+
+It can take some time for the jar to be visible on Maven Central. You can start by looking on the repo1 server, like this: https://repo1.maven.org/maven2/io/gdcc/sword2-server/2.0.0/
+
+Don't bother putting the new version in a pom.xml until you see it on repo1.
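+
+One quick way to check from the command line (a sketch; substitute your own group, artifact, and version):
+
+.. code-block:: bash
+
+  # Prints 200 once the artifact is visible on repo1 (404 until then).
+  curl -s -o /dev/null -w "%{http_code}\n" \
+    "https://repo1.maven.org/maven2/io/gdcc/sword2-server/2.0.0/sword2-server-2.0.0.pom"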
+
+Note that the next snapshot release should be available as well, like this: https://s01.oss.sonatype.org/content/groups/staging/io/gdcc/sword2-server/2.0.1-SNAPSHOT/ 
+
+Releasing a New Library to Maven Central
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+At a high level:
+
+- Use an existing pom.xml as a starting point.
+- Use existing GitHub Actions workflows as a starting point.
+- Create secrets in the new library's GitHub repo used by the workflow.
+- If you need an entire new namespace, look at previous issues such as https://issues.sonatype.org/browse/OSSRH-94575 and https://issues.sonatype.org/browse/OSSRH-94577
+
+npm (JavaScript/TypeScript)
+---------------------------
+
+Currently, publishing `@iqss/dataverse-design-system <https://www.npmjs.com/package/@iqss/dataverse-design-system>`_ to npm is done manually. We plan to automate this as part of https://github.com/IQSS/dataverse-frontend/issues/140
+
+https://www.npmjs.com/package/js-dataverse is the previous 1.0 version of js-dataverse. No 1.x releases are planned. We plan to publish 2.0 (used by the new frontend) as discussed in https://github.com/IQSS/dataverse-frontend/issues/13
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst
index a2575bb5f50..23c4773a06e 100755
--- a/doc/sphinx-guides/source/developers/making-releases.rst
+++ b/doc/sphinx-guides/source/developers/making-releases.rst
@@ -8,6 +8,8 @@ Making Releases
 Introduction
 ------------
 
+Note: See :doc:`making-library-releases` for how to publish our libraries to Maven Central. 
+
 See :doc:`version-control` for background on our branching strategy.
 
 The steps below describe making both regular releases and hotfix releases.
diff --git a/doc/sphinx-guides/source/developers/metadataexport.rst b/doc/sphinx-guides/source/developers/metadataexport.rst
new file mode 100644
index 00000000000..7f7536fb7f8
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/metadataexport.rst
@@ -0,0 +1,88 @@
+=======================
+Metadata Export Formats
+=======================
+
+.. contents:: |toctitle|
+    :local:
+
+Introduction
+------------
+
+Dataverse ships with a number of metadata export formats available for published datasets. A given metadata export
+format may be available for user download (via the UI and API) and/or be available for use in Harvesting between
+Dataverse instances.
+
+As of v5.14, Dataverse provides a mechanism for third-party developers to create new metadata Exporters that implement
+new metadata formats or that replace existing formats. All the necessary dependencies are packaged in an interface JAR file
+available from Maven Central. Developers can distribute their new Exporters as JAR files which can be dynamically loaded
+into Dataverse instances - see :ref:`external-exporters`. Developers are encouraged to make their Exporter code available
+via https://github.com/gdcc/dataverse-exporters (or minimally, to list their existence in the README there). 
+
+Exporter Basics
+---------------
+
+New Exporters must implement the ``io.gdcc.spi.export.Exporter`` interface. The interface includes a few methods for the Exporter
+to provide Dataverse with the format it produces, a display name, format mimetype, and whether the format is for download 
+and/or harvesting use, etc. It also includes a main ``exportDataset(ExportDataProvider dataProvider, OutputStream outputStream)``
+method through which the Exporter receives metadata about the given dataset (via the ``ExportDataProvider``, described further 
+below) and writes its output (as an OutputStream).
+
+Exporters that create an XML format must implement the ``io.gdcc.spi.export.XMLExporter`` interface (which extends the Exporter
+interface). XMLExporter adds a few methods through which the XMLExporter provides information to Dataverse about the XML 
+namespace and version being used.
+
+Exporters also need to use the ``@AutoService(Exporter.class)`` annotation, which makes the class discoverable as an Exporter implementation.
+
+The ``ExportDataProvider`` interface provides several methods through which your Exporter can receive dataset and file metadata
+in various formats. Your exporter would parse the information in one or more of these inputs to retrieve the values needed to
+generate the Exporter's output format.
+
+The most important methods/input formats are:
+
+- ``getDatasetJson()`` - metadata in the internal Dataverse JSON format used in the native API and available via the built-in JSON metadata export.
+- ``getDatasetORE()`` - metadata in the OAI_ORE format available as a built-in metadata format and as used in Dataverse's BagIT-based Archiving capability. 
+- ``getDatasetFileDetails()`` - detailed file-level metadata for ingested tabular files.
+ 
+The first two of these provide ~complete metadata about the dataset along with the metadata common to all files. This includes all metadata
+entries from all metadata blocks, PIDs, tags, Licenses and custom terms, etc. Almost all built-in exporters today use the JSON input.
+The newer OAI_ORE export, which is JSON-LD-based, provides a flatter structure and references metadata terms by their external vocabulary ids
+(e.g. http://purl.org/dc/terms/title), which may make it a preferable starting point in some cases.
+ 
+The last method above provides a new JSON-formatted serialization of the variable-level file metadata Dataverse generates during ingest of tabular files.
+This information has only been included in the built-in DDI export, as the content of a ``dataDscr`` element. (Hence inspecting the edu.harvard.iq.dataverse.export.DDIExporter and related classes would be a good way to explore how the JSON is structured.) 
+
+The interface also provides
+
+- ``getDatasetSchemaDotOrg()`` and
+- ``getDataCiteXml()``.
+  
+These provide subsets of metadata in the indicated formats. They may be useful starting points if your exporter will, for example, only add one or two additional fields to the given format.
+
+If an Exporter cannot create a requested metadata format for some reason, it should throw an ``io.gdcc.spi.export.ExportException``.
+
+Building an Exporter
+--------------------
+
+The example at https://github.com/gdcc/dataverse-exporters provides a Maven pom.xml file suitable for building an Exporter JAR file, and that repository provides additional development guidance. A rough build-and-deploy sketch follows the dependency list below.
+
+There are four dependencies needed to build an Exporter:
+
+- ``io.gdcc dataverse-spi`` library containing the interfaces discussed above and the ExportException class
+- ``com.google.auto.service auto-service``, which provides the @AutoService annotation
+- ``jakarta.json jakarta.json-api`` for JSON classes
+- ``jakarta.ws.rs jakarta.ws.rs-api``, which provides a MediaType enumeration for specifying mime types.
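+
+As a rough sketch of the build-and-deploy flow (assuming the layout of the example repository and that your installation has a directory for external exporters configured as described in :ref:`external-exporters`):
+
+.. code-block:: bash
+
+  # Build the Exporter JAR (run in the exporter's Maven project directory).
+  mvn package
+
+  # Copy the JAR into the directory your Dataverse installation loads external exporters from.
+  # The destination path below is only a placeholder.
+  cp target/*.jar /path/to/your/exporters/directory/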
+
+Specifying a Prerequisite Export
+--------------------------------
+
+An advanced feature of the Exporter mechanism allows a new Exporter to specify that it requires, as input, 
+the output of another Exporter. An example of this is the built-in HTMLExporter, which requires the output
+of the DDI XML Exporter to produce an HTML document with the same DDI content.
+
+This is configured by providing the metadata format name via the ``Exporter.getPrerequisiteFormatName()`` method.
+When this method returns a non-empty format name, Dataverse will provide the requested format to the Exporter via
+the ``ExportDataProvider.getPrerequisiteInputStream()`` method.
+
+Developers and administrators deploying Exporters using this mechanism should be aware that, since metadata formats
+can be changed by other Exporters, the InputStream received may not hold the expected metadata. Developers should clearly
+document their compatibility with the built-in or third-party Exporters they support as prerequisites.
diff --git a/doc/sphinx-guides/source/developers/performance.rst b/doc/sphinx-guides/source/developers/performance.rst
new file mode 100644
index 00000000000..46c152f322e
--- /dev/null
+++ b/doc/sphinx-guides/source/developers/performance.rst
@@ -0,0 +1,196 @@
+Performance
+===========
+
+`Performance is a feature <https://blog.codinghorror.com/performance-is-a-feature/>`_ was a mantra when Stack Overflow was being developed. We endeavor to do the same with Dataverse!
+
+In this section we collect ideas and share practices for improving performance.
+
+.. contents:: |toctitle|
+        :local:
+
+Problem Statement
+-----------------
+
+Performance has always been important to the Dataverse Project, but results have been uneven. We've seen enough success in the marketplace that performance must be adequate, but internally we sometimes refer to Dataverse as a pig. 🐷
+
+Current Practices
+-----------------
+
+We've adopted a number of practices to help us maintain our current level of performance and most should absolutely continue in some form, but challenges mentioned throughout should be addressed to further improve performance.
+
+Cache When You Can
+~~~~~~~~~~~~~~~~~~
+
+The Metrics API, for example, caches values for 7 days by default. We took a look at JSR 107 (JCache - Java Temporary Caching API) in `#2100 <https://github.com/IQSS/dataverse/issues/2100>`_. We're aware of the benefits of caching.
+
+Use Async
+~~~~~~~~~
+
+We index datasets (and all objects) asynchronously. That is, we let changes persist in the database and afterward copy the data into Solr.
+
+Use a Queue
+~~~~~~~~~~~
+
+We use a JMS queue when ingesting tabular files. We've talked about adding a queue (even `an external queue <https://github.com/IQSS/dataverse/issues/1100#issuecomment-311341995>`_) for indexing, DOI registration, and other services.
+
+Offload Expensive Operations Outside the App Server
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When operations are computationally expensive, we have realized performance gains by offloading them to systems outside of the core code. For example, rather than having files pass through our application server when they are downloaded, we use direct download so that client machines download files directly from S3. (We use the same trick with upload.) When a client downloads multiple files, rather than zipping them within the application server as before, we now have a separate "zipper" process that does this work out of band.
+
+Drop to Raw SQL as Necessary
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We aren't shy about writing raw SQL queries when necessary. We've written `querycount <https://github.com/IQSS/dataverse/blob/v6.0/scripts/database/querycount/README.txt>`_ scripts to help identify problematic queries and mention the slow query log at :doc:`/admin/monitoring`.
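+
+As one concrete way to surface slow queries in a dev database (a sketch using plain PostgreSQL settings rather than anything Dataverse-specific; adjust the threshold and user to your setup):
+
+.. code-block:: bash
+
+  # Log any statement that takes longer than two seconds, then reload the config.
+  psql -U postgres -c "ALTER SYSTEM SET log_min_duration_statement = 2000;"
+  psql -U postgres -c "SELECT pg_reload_conf();"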
+
+Add Indexes to Database Tables
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There was a concerted effort in `#1880 <https://github.com/IQSS/dataverse/issues/1880>`_ to add indexes to a large number of columns, but it's something we're mindful of, generally. Perhaps we could use some better detection of when indexes would be valuable.
+
+Find Bottlenecks with a Profiler
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+VisualVM is popular and bundled with Netbeans. Many options are available including `JProfiler <https://github.com/IQSS/dataverse/pull/9413>`_.
+
+Warn Developers in Code Comments
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For code that has been optimized for performance, warnings are sometimes inserted in the form of comments for future developers to prevent backsliding.
+
+Write Docs for Devs about Perf
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Like this doc. :)
+
+Sometimes perf is written about in other places, such as :ref:`avoid-efficiency-issues-with-render-logic-expressions`.
+
+Horizontal Scaling of App Server
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We've made it possible to run more than one application server, though it requires some special configuration. This way load can be spread out across multiple servers. For details, see :ref:`multiple-app-servers` in the Installation Guide.
+
+Code Review and QA
+~~~~~~~~~~~~~~~~~~
+
+Before code is merged, while it is in review or QA, if a performance problem is detected (usually on an ad hoc basis), the code is returned to the developer for improvement. Developers and reviewers typically do not have many tools at their disposal to test code changes against anything close to production data. QA maintains a machine with a copy of production data but tests against smaller data unless a performance problem is suspected.
+
+A new QA guide is coming in https://github.com/IQSS/dataverse/pull/10103
+
+Locust Testing at Release Time
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+As one of the final steps in preparing for a release, QA runs performance tests using a tool called Locust as explained in the Developer Guide (see :ref:`locust`). The tests are not comprehensive, testing only a handful of pages with anonymous users, but they increase confidence that the upcoming release is not drastically slower than previous releases.
+
+Issue Tracking and Prioritization
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Performance issues are tracked in our issue tracker under the `Feature: Performance & Stability <https://github.com/IQSS/dataverse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Feature%3A+Performance+%26+Stability%22>`_ label (e.g. `#7788 <https://github.com/IQSS/dataverse/issues/7788>`_). That way, we can track performance problems throughout the application. Unfortunately, the pain is often felt by users in production before we realize there is a problem. As needed, performance issues are prioritized to be included in a sprint, to \ `speed up the collection page <https://github.com/IQSS/dataverse/pull/8143>`_, for example.
+
+Document Performance Tools
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In the :doc:`/admin/monitoring` page section of the Admin Guide we describe how to set up Munin for monitoring performance of an operating system. We also explain how to set up Performance Insights to monitor AWS RDS (PostgreSQL as a service, in our case). In the :doc:`/developers/tools` section of the Developer Guide, we have documented how to use Eclipse Memory Analyzer Tool (MAT), SonarQube, jmap, and jstat.
+
+Google Analytics
+~~~~~~~~~~~~~~~~
+
+Emails go to a subset of the team monthly with subjects like "Your September Search performance for https://dataverse.harvard.edu" with a link to a report, but it's mostly about the number of clicks, not how fast the site is. It's unclear if it provides any value with regard to performance.
+
+Abandoned Tools and Practices
+-----------------------------
+
+New Relic
+~~~~~~~~~
+
+For many years Harvard Dataverse was hooked up to New Relic, a tool that promises all-in-one observability, according to their `website <https://newrelic.com>`_. In practice, we didn't do much with `the data <https://github.com/IQSS/dataverse/issues/3665>`_.
+
+Areas of Particular Concern
+---------------------------
+
+Command Engine Execution Rate Metering
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We'd like to rate limit commands (CreateDataset, etc.) so that we can keep them at a reasonable level (`#9356 <https://github.com/IQSS/dataverse/issues/9356>`_). This is similar to how many APIs are rate limited, such as the GitHub API.
+
+Solr
+~~~~
+
+While in the past Solr performance hasn't been much of a concern, in recent years we've noticed performance problems when Harvard Dataverse is under load. Improvements were made in `PR #10050 <https://github.com/IQSS/dataverse/pull/10050>`_, for example.
+
+Datasets with Large Numbers of Files or Versions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We'd like to scale Dataverse to better handle large numbers of files or versions. Progress was made in `PR #9883 <https://github.com/IQSS/dataverse/pull/9883>`_.
+
+Withstanding Bots
+~~~~~~~~~~~~~~~~~
+
+Google bot, etc.
+
+Suggested Practices
+-------------------
+
+Many of our current practices should remain in place unaltered. Others could use some refinement. Some new practices should be adopted as well. Here are some suggestions.
+
+Implement the Frontend Plan for Performance
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The `Dataverse - SPA MVP Definition doc <https://docs.google.com/document/d/1WnJzLeVK5eVP4_10eX6BwPAnmiamO1n2uGzcwrAsucQ/edit?usp=sharing>`_ has some ideas around how to achieve good performance for the new front end in the areas of rendering, monitoring, file upload/download, pagination, and caching. We should create as many issues as necessary in the frontend repo and work on them in time. The doc recommends the use of `React Profiler <https://legacy.reactjs.org/blog/2018/09/10/introducing-the-react-profiler.html>`_ and other tools. Not mentioned is https://pagespeed.web.dev but we can investigate it as well. See also `#183 <https://github.com/IQSS/dataverse-frontend/issues/183>`_, a parent issue about performance. In `#184 <https://github.com/IQSS/dataverse-frontend/issues/184>`_ we plan to compare the performance of the old JSF UI vs. the new React UI. Cypress plugins for load testing could be investigated.
+
+Set up Query Counter in Jenkins
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+See the querycount script mentioned above. See also https://jenkins.dataverse.org/job/IQSS-dataverse-develop/ws/target/query_count.out
+
+Show the plot over time. Make spikes easily apparent. 320,035 queries as of this writing.
+
+Count Database Queries per API Test
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Is it possible? Just a thought.
+
+Teach Developers How to Do Performance Testing Locally
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Do developers know how to use a profiler? Should they use `JMeter <https://github.com/BU-NU-CLOUD-SP18/Dataverse-Scaling#our-project-video>`_? `statsd-jvm-profiler <https://github.com/etsy/statsd-jvm-profiler>`_? How do you run our :ref:`locust` tests? Should we continue using that tool? Give developers time and space to try out tools and document any tips along the way. For this stage, small data is fine.
+
+Automate Performance Testing
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We are already using two excellent continuous integration (CI) tools, Jenkins and GitHub Actions, to test our code. We should add performance testing into the mix (`#4201 <https://github.com/IQSS/dataverse/issues/4201>`_ is an old issue for this but we can open a fresh one). Currently we test every commit on every PR and we should consider if this model makes sense since performance testing will likely take longer to run than regular tests. Once developers are comfortable with their favorite tools, we can pick which ones to automate.
+
+Make Production Data or Equivalent Available to Developers
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If developers are only testing small amounts of data on their laptops, it's hard to detect performance problems. Not every bug fix requires access to data similar to production, but it should be made available. This is not a trivial task! If we are to use actual production data, we need to be very careful to de-identify it. If we start with our `sample-data <https://github.com/IQSS/dataverse-sample-data>`_  repo instead, we'll need to figure out how to make sure we cover cases like many files, many versions, etc.
+
+Automate Performance Testing with Production Data or Equivalent
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Hopefully the environment developers use with production data or equivalent can be made available to our CI tools. Perhaps these tests don't need to be run on every commit to every pull request, but they should be run regularly.
+
+Use Monitoring as Performance Testing
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Monitoring can be seen as a form of testing. How long is a round trip ping to production? What is the Time to First Byte? First Contentful Paint? Largest Contentful Paint? Time to Interactive? We now have a beta server that we could monitor continuously to know if our app is getting faster or slower over time. Should our monitoring of production servers be improved?
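+
+As a small example of the kind of measurement that could be collected regularly (a sketch; the URL is a placeholder):
+
+.. code-block:: bash
+
+  # Report time to first byte and total time for loading the homepage.
+  curl -s -o /dev/null -w "TTFB: %{time_starttransfer}s  Total: %{time_total}s\n" "https://demo.dataverse.org/"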
+
+Learn from Training and Conferences
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Most likely there is training available that is oriented toward performance. The subject of performance often comes up at conferences as well.
+
+Learn from the Community How They Monitor Performance
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Some members of the Dataverse community are likely users of newish tools like the ELK stack (Elasticsearch, Logstash, and Kibana), the TICK stack (Telegraf, InfluxDB, Chronograf, and Kapacitor), GoAccess, Prometheus, Graphite, and more we haven't even heard of. In the :doc:`/admin/monitoring` section of the Admin Guide, we already encourage the community to share findings, but we could dedicate time to this topic at our annual meeting or community calls.
+
+Teach the Community to Do Performance Testing
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+We have a worldwide community of developers. We should do what we can in the form of documentation and other resources to help them develop performant code.
+
+Conclusion
+----------
+
+Given its long history, Dataverse has encountered many performance problems over the years. The core team is conversant in how to make the app more performant, but investment in learning additional tools and best practices would likely yield dividends. We should automate our performance testing, catching more problems before code is merged.
diff --git a/doc/sphinx-guides/source/developers/remote-users.rst b/doc/sphinx-guides/source/developers/remote-users.rst
index 21d36d28a75..484abe9ccf0 100755
--- a/doc/sphinx-guides/source/developers/remote-users.rst
+++ b/doc/sphinx-guides/source/developers/remote-users.rst
@@ -30,12 +30,16 @@ Now when you go to http://localhost:8080/oauth2/firstLogin.xhtml you should be p
 
 ----
 
+.. _oidc-dev:
+
 OpenID Connect (OIDC)
 ---------------------
 
+STOP! ``oidc-keycloak-auth-provider.json`` was changed from http://localhost:8090 to http://keycloak.mydomain.com:8090 to test :ref:`bearer-tokens`. In addition, ``docker-compose-dev.yml`` in the root of the repo was updated to start up Keycloak. To use these, you should add ``127.0.0.1 keycloak.mydomain.com`` to your ``/etc/hosts`` file. If you'd like to use the Docker Compose setup as described below (``conf/keycloak/docker-compose.yml``), you should revert the change to ``oidc-keycloak-auth-provider.json``.
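+
+For example, on Linux or macOS the entry could be appended like this (a sketch; adjust to however you manage your hosts file):
+
+.. code-block:: bash
+
+  # Map the Keycloak hostname used in the dev config to localhost.
+  echo "127.0.0.1 keycloak.mydomain.com" | sudo tee -a /etc/hosts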
+
 If you are working on the OpenID Connect (OIDC) user authentication flow, you do not need to connect to a remote provider (as explained in :doc:`/installation/oidc`) to test this feature. Instead, you can use the available configuration that allows you to run a test Keycloak OIDC identity management service locally through a Docker container.
 
-(Please note! The client secret (``ss6gE8mODCDfqesQaSG3gwUwZqZt547E``) is hard-coded in ``oidc-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.)
+(Please note! The client secret (``94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8``) is hard-coded in ``test-realm.json`` and ``oidc-keycloak-auth-provider.json``. Do not use this config in production! This is only for developers.)
 
 You can find this configuration in ``conf/keycloak``. There are two options available in this directory to run a Keycloak container: bash script or docker-compose.
 
@@ -51,15 +55,27 @@ Now load the configuration defined in ``oidc-keycloak-auth-provider.json`` into
 
 You should see the new provider, called "OIDC-Keycloak", under "Other options" on the Log In page.
 
-You should be able to log into Keycloak with the following credentials:
+You should be able to log into Keycloak with one of the following credentials:
 
-- username: kcuser
-- password: kcpassword
+.. list-table::
+
+  * - Username
+    - Password
+  * - admin
+    - admin
+  * - curator
+    - curator
+  * - user
+    - user
+  * - affiliate
+    - affiliate
 
 In case you want to stop and remove the Keycloak container, just run the other available bash script:
 
 ``./rm-keycloak.sh``
 
+Note: the Keycloak admin credentials for logging into the admin console are ``kcadmin:kcpassword``.
+
 ----
 
 Previous: :doc:`unf/index` | Next: :doc:`geospatial`
diff --git a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
index 4d323455d28..d7f270a4e38 100644
--- a/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
+++ b/doc/sphinx-guides/source/developers/s3-direct-upload-api.rst
@@ -69,8 +69,9 @@ In the single part case, only one call to the supplied URL is required:
 
 .. code-block:: bash
 
-    curl -H 'x-amz-tagging:dv-state=temp' -X PUT -T <filename> "<supplied url>"
+    curl -i -H 'x-amz-tagging:dv-state=temp' -X PUT -T <filename> "<supplied url>"
 
+Note that without the ``-i`` flag, you should not expect any output from the command above. With the ``-i`` flag, you should expect to see a "200 OK" response.
 
 In the multipart case, the client must send each part and collect the 'eTag' responses from the server. The calls for this are the same as the one for the single part case except that each call should send a <partSize> slice of the total file, with the last part containing the remaining bytes.
 The responses from the S3 server for these calls will include the 'eTag' for the uploaded part. 
@@ -115,8 +116,8 @@ The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.Data
 
   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
   
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. 
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
 
 To add multiple Uploaded Files to the Dataset
 ---------------------------------------------
@@ -146,8 +147,8 @@ The allowed checksum algorithms are defined by the edu.harvard.iq.dataverse.Data
 
   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/datasets/:persistentId/addFiles?persistentId=$PERSISTENT_IDENTIFIER" -F "jsonData=$JSON_DATA"
 
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method.
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
 
 
 Replacing an existing file in the Dataset
@@ -176,8 +177,8 @@ Note that the API call does not validate that the file matches the hash value su
 
   curl -X POST -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/files/$FILE_IDENTIFIER/replace" -F "jsonData=$JSON_DATA"
   
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. 
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+Note that this API call can be used independently of the others, e.g. supporting use cases in which the file already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
 
 Replacing multiple existing files in the Dataset
 ------------------------------------------------
@@ -274,5 +275,5 @@ The JSON object returned as a response from this API call includes a "data" that
   }
 
 
-Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method.
-With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifer must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
+Note that this API call can be used independently of the others, e.g. supporting use cases in which the files already exists in S3/has been uploaded via some out-of-band method. Enabling out-of-band uploads is described at :ref:`file-storage` in the Configuration Guide.
+With current S3 stores the object identifier must be in the correct bucket for the store, include the PID authority/identifier of the parent dataset, and be guaranteed unique, and the supplied storage identifier must be prefaced with the store identifier used in the Dataverse installation, as with the internally generated examples above.
diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst
index 4b3d5fd0a55..8e60378fd90 100755
--- a/doc/sphinx-guides/source/developers/testing.rst
+++ b/doc/sphinx-guides/source/developers/testing.rst
@@ -5,7 +5,7 @@ Testing
 In order to keep our codebase healthy, the Dataverse Project encourages developers to write automated tests in the form of unit tests and integration tests. We also welcome ideas for how to improve our automated testing.
 
 .. contents:: |toctitle|
-	:local:
+    :local:
 
 The Health of a Codebase
 ------------------------
@@ -46,13 +46,15 @@ The main takeaway should be that we care about unit testing enough to measure th
 Writing Unit Tests with JUnit
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-We are aware that there are newer testing tools such as TestNG, but we use `JUnit <http://junit.org>`_ because it's tried and true.
-We support both (legacy) JUnit 4.x tests (forming the majority of our tests) and
-newer JUnit 5 based testing.
+We are aware that there are newer testing tools such as TestNG, but we use `JUnit <https://junit.org>`_ because it's tried and true.
+We support JUnit 5 based testing and require new tests written with it.
+(Since Dataverse 6.0, we migrated all of our tests formerly based on JUnit 4.)
 
-NOTE: When adding new tests, you should give JUnit 5 a go instead of adding more dependencies to JUnit 4.x.
-
-If writing tests is new to you, poke around existing unit tests which all end in ``Test.java`` and live under ``src/test``. Each test is annotated with ``@Test`` and should have at least one assertion which specifies the expected result. In Netbeans, you can run all the tests in it by clicking "Run" -> "Test File". From the test file, you should be able to navigate to the code that's being tested by right-clicking on the file and clicking "Navigate" -> "Go to Test/Tested class". Likewise, from the code, you should be able to use the same "Navigate" menu to go to the tests.
+If writing tests is new to you, poke around existing unit tests which all end in ``Test.java`` and live under ``src/test``.
+Each test is annotated with ``@Test`` and should have at least one assertion which specifies the expected result.
+In Netbeans, you can run all the tests in it by clicking "Run" -> "Test File".
+From the test file, you should be able to navigate to the code that's being tested by right-clicking on the file and clicking "Navigate" -> "Go to Test/Tested class".
+Likewise, from the code, you should be able to use the same "Navigate" menu to go to the tests.
 
 NOTE: Please remember when writing tests checking possibly localized outputs to check against ``en_US.UTF-8`` and ``UTC``
 l10n strings!
@@ -62,22 +64,24 @@ Refactoring Code to Make It Unit-Testable
 
 Existing code is not necessarily written in a way that lends itself to easy testing. Generally speaking, it is difficult to write unit tests for both JSF "backing" beans (which end in ``Page.java``) and "service" beans (which end in ``Service.java``) because they require the database to be running in order to test them. If service beans can be exercised via API they can be tested with integration tests (described below) but a good technique for making the logic testable it to move code to "util beans" (which end in ``Util.java``) that operate on Plain Old Java Objects (POJOs). ``PrivateUrlUtil.java`` is a good example of moving logic from ``PrivateUrlServiceBean.java`` to a "util" bean to make the code testable.
 
-Parameterized Tests and JUnit Theories
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Parameterized Tests
+^^^^^^^^^^^^^^^^^^^
+
 Often times you will want to test a method multiple times with similar values.
 In order to avoid test bloat (writing a test for every data combination),
 JUnit offers Data-driven unit tests. This allows a test to be run for each set
 of defined data values.
 
-JUnit 4 uses ``Parameterized.class`` and ``Theories.class``. For reference, take a look at issue https://github.com/IQSS/dataverse/issues/5619.
-
-JUnit 5 doesn't offer theories (see `jqwik <https://jqwik.net>`_ for this), but
-greatly extended parameterized testing. Some guidance how to write those:
+JUnit 5 offers great parameterized testing. Some guidance how to write those:
 
 - https://junit.org/junit5/docs/current/user-guide/#writing-tests-parameterized-tests
 - https://www.baeldung.com/parameterized-tests-junit-5
 - https://blog.codefx.org/libraries/junit-5-parameterized-tests/
-- See also some examples in our codebase.
+- See also many examples in our codebase.
+
+Note that JUnit 5 also offers support for custom test parameter resolvers. This enables keeping tests cleaner,
+as preparation might happen within some extension and the test code is more focused on the actual testing.
+See https://junit.org/junit5/docs/current/user-guide/#extensions-parameter-resolution for more information.
 
 JUnit 5 Test Helper Extensions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -85,22 +89,35 @@ JUnit 5 Test Helper Extensions
 Our codebase provides little helpers to ease dealing with state during tests.
 Some tests might need to change something which should be restored after the test ran.
 
-For unit tests, the most interesting part is to set a JVM setting just for the current test.
-Please use the ``@JvmSetting(key = JvmSettings.XXX, value = "")`` annotation on a test method or
-a test class to set and clear the property automatically.
+For unit tests, the most interesting part is to set a JVM setting just for the current test or a whole test class
+(which might be an inner class, too!). Please make use of the ``@JvmSetting(key = JvmSettings.XXX, value = "")``
+annotation and also make sure to annotate the test class with ``@LocalJvmSettings``.
+
+Inspired by JUnit's ``@MethodSource`` annotation, you may use ``@JvmSetting(key = JvmSettings.XXX, method = "zzz")``
+to reference a static method located in the same test class by name (i.e. ``private static String zzz() {}``) to allow
+retrieving dynamic data instead of String constants only. (Note the requirement for a *static* method!)
+
+If you want to delete a setting, simply provide a ``null`` value. This can be used to override a class-wide setting
+or some other default that is present for some reason.
 
-To set arbitrary system properties for the current test, a similar extension
-``@SystemProperty(key = "", value = "")`` has been added.
+To set arbitrary system properties for the current test, a similar extension ``@SystemProperty(key = "", value = "")``
+has been added. (Note: it does not support method references.)
 
 Both extensions will ensure the global state of system properties is non-interfering for
 test executions. Tests using these extensions will be executed in serial.
 
+This settings helper may be extended at a later time to manipulate settings in a remote instance during integration
+or end-to-end testing. Stay tuned!
+
 Observing Changes to Code Coverage
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Once you've written some tests, you're probably wondering how much you've helped to increase the code coverage. In Netbeans, do a "clean and build." Then, under the "Projects" tab, right-click "dataverse" and click "Code Coverage" -> "Show Report". For each Java file you have open, you should be able to see the percentage of code that is covered by tests and every line in the file should be either green or red. Green indicates that the line is being exercised by a unit test and red indicates that it is not.
 
-In addition to seeing code coverage in Netbeans, you can also see code coverage reports by opening ``target/site/jacoco/index.html`` in your browser.
+In addition to seeing code coverage in Netbeans, you can also see code coverage reports by opening ``target/site/jacoco-X-test-coverage-report/index.html`` in your browser.
+Depending on the report type you want to look at, let ``X`` be one of ``unit``, ``integration`` or ``merged``.
+"Merged" will display combined coverage of both unit and integration test, but does currently not cover API tests.
+
 
 Testing Commands
 ^^^^^^^^^^^^^^^^
@@ -116,11 +133,14 @@ In addition, there is a writeup on "The Testable Command" at https://github.com/
 Running Non-Essential (Excluded) Unit Tests
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-You should be aware that some unit tests have been deemed "non-essential" and have been annotated with ``@Category(NonEssentialTests.class)`` and are excluded from the "dev" Maven profile, which is the default profile. All unit tests (that have not been annotated with ``@Ignore``), including these non-essential tests, are run from continuous integration systems such as Jenkins and GitHub Actions with the following ``mvn`` command that invokes a non-default profile:
+You should be aware that some unit tests have been deemed "non-essential" and have been annotated with ``@Tag(Tags.NOT_ESSENTIAL_UNITTESTS)`` and are excluded from the "dev" Maven profile, which is the default profile.
+All unit tests (that have not been annotated with ``@Disabled``), including these non-essential tests, are run from continuous integration systems such as Jenkins and GitHub Actions with the following ``mvn`` command that invokes a non-default profile:
 
 ``mvn test -P all-unit-tests``
 
-Generally speaking, unit tests have been flagged as non-essential because they are slow or because they require an Internet connection. You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them. To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential.
+Generally speaking, unit tests have been flagged as non-essential because they are slow or because they require an Internet connection.
+You should not feel obligated to run these tests continuously but you can use the ``mvn`` command above to run them.
+To iterate on the unit test in Netbeans and execute it with "Run -> Test File", you must temporarily comment out the annotation flagging the test as non-essential.
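+
+As a hedged sketch (the class and test names below are purely illustrative, and the fully qualified ``Tags`` constant
+is assumed to live in the same helper class used in the Testcontainers example further down), flagging a test class as
+non-essential might look like this:
+
+.. code:: java
+
+    @Tag(edu.harvard.iq.dataverse.util.testing.Tags.NOT_ESSENTIAL_UNITTESTS)
+    class SlowExternalLookupTest {
+
+        @Test
+        void resolvesExternalHost() {
+            // slow and/or requires an Internet connection, hence "not essential"
+        }
+    }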
 
 Integration Tests
 -----------------
@@ -170,42 +190,38 @@ Finally, run the script:
 
   $ ./ec2-create-instance.sh -g jenkins.yml -l log_dir
 
-Running the full API test suite using Docker
+Running the Full API Test Suite Using Docker
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-To run the full suite of integration tests on your laptop, we recommend using the "all in one" Docker configuration described in ``conf/docker-aio/readme.md`` in the root of the repo.
-
-Alternatively, you can run tests against the app server running on your laptop by following the "getting set up" steps below.
+To run the full suite of integration tests on your laptop, we recommend running Dataverse and its dependencies in Docker, as explained in the :doc:`/container/dev-usage` section of the Container Guide. This environment provides additional services (such as S3) that are used in testing.
 
-Getting Set Up to Run REST Assured Tests
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Running the APIs Without Docker (Classic Dev Env)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Unit tests are run automatically on every build, but dev environments and servers require special setup to run REST Assured tests. In short, the Dataverse Software needs to be placed into an insecure mode that allows arbitrary users and datasets to be created and destroyed. This differs greatly from the out-of-the-box behavior of the Dataverse Software, which we strive to keep secure for sysadmins installing the software for their institutions in a production environment.
+While it is possible to run a good number of API tests without using Docker in our :doc:`classic-dev-env`, we are transitioning toward including additional services (such as S3) in our Dockerized development environment (:doc:`/container/dev-usage`), so you will probably find it more convenient to use it instead.
 
-The :doc:`dev-environment` section currently refers developers here for advice on getting set up to run REST Assured tests, but we'd like to add some sort of "dev" flag to the installer to put the Dataverse Software in "insecure" mode, with lots of scary warnings that this dev mode should not be used in production.
-
-The instructions below assume a relatively static dev environment on a Mac. There is a newer "all in one" Docker-based approach documented in the :doc:`/developers/containers` section under "Docker" that you may like to play with as well.
+Unit tests are run automatically on every build, but dev environments and servers require special setup to run API (REST Assured) tests. In short, the Dataverse software needs to be placed into an insecure mode that allows arbitrary users and datasets to be created and destroyed (in the Dockerized environment this happens automatically; otherwise, follow the steps described below). This differs greatly from the out-of-the-box behavior of the Dataverse software, which we strive to keep secure for sysadmins installing the software for their institutions in a production environment.
 
 The Burrito Key
 ^^^^^^^^^^^^^^^
 
-For reasons that have been lost to the mists of time, the Dataverse Software really wants you to to have a burrito. Specifically, if you're trying to run REST Assured tests and see the error "Dataverse config issue: No API key defined for built in user management", you must run the following curl command (or make an equivalent change to your database):
+For reasons that have been lost to the mists of time, the Dataverse software really wants you to have a burrito. Specifically, if you're trying to run REST Assured tests and see the error "Dataverse config issue: No API key defined for built in user management", you must run the following curl command (or make an equivalent change to your database):
 
 ``curl -X PUT -d 'burrito' http://localhost:8080/api/admin/settings/BuiltinUsers.KEY``
 
-Without this "burrito" key in place, REST Assured will not be able to create users. We create users to create objects we want to test, such as Dataverse collections, datasets, and files.
+Without this "burrito" key in place, REST Assured will not be able to create users. We create users to create objects we want to test, such as collections, datasets, and files.
 
-Root Dataverse Collection Permissions
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Root Collection Permissions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-In your browser, log in as dataverseAdmin (password: admin) and click the "Edit" button for your root Dataverse collection. Navigate to Permissions, then the Edit Access button. Under "Who can add to this Dataverse collection?" choose "Anyone with a Dataverse installation account can add sub Dataverse collections and datasets" if it isn't set to this already.
+In your browser, log in as dataverseAdmin (password: admin) and click the "Edit" button for your root collection. Navigate to Permissions, then the Edit Access button. Under "Who can add to this collection?" choose "Anyone with a Dataverse installation account can add sub collections and datasets" if it isn't set to this already.
 
 Alternatively, this same step can be done with this script: ``scripts/search/tests/grant-authusers-add-on-root``
 
-Publish Root Dataverse Collection
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Publish Root Collection
+^^^^^^^^^^^^^^^^^^^^^^^
 
-The root Dataverse collection must be published for some of the REST Assured tests to run.
+The root collection must be published for some of the REST Assured tests to run.
 
 dataverse.siteUrl
 ^^^^^^^^^^^^^^^^^
@@ -218,6 +234,20 @@ If ``dataverse.siteUrl`` is absent, you can add it with:
 
 ``./asadmin create-jvm-options "-Ddataverse.siteUrl=http\://localhost\:8080"``
 
+dataverse.oai.server.maxidentifiers
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The OAI Harvesting tests require that the paging limit for ListIdentifiers be set to 2, so that this paging behavior can be triggered without having to create and export too many datasets:
+
+``./asadmin create-jvm-options "-Ddataverse.oai.server.maxidentifiers=2"``
+
+dataverse.oai.server.maxrecords
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The OAI Harvesting tests require that the paging limit for ListRecords be set to 2, so that this paging behavior can be triggered without having to create and export too many datasets:
+
+``./asadmin create-jvm-options "-Ddataverse.oai.server.maxrecords=2"``
+
 Identifier Generation
 ^^^^^^^^^^^^^^^^^^^^^
 
@@ -238,17 +268,22 @@ Remember, it’s only a test (and it's not graded)! Some guidelines to bear in m
 - Map out which logical functions you want to test
 - Understand what’s being tested and ensure it’s repeatable
 - Assert the conditions of success / return values for each operation
-  * A useful resource would be `HTTP status codes <http://www.restapitutorial.com/httpstatuscodes.html>`_
+  * A useful resource would be `HTTP status codes <https://www.restapitutorial.com/httpstatuscodes.html>`_
 - Let the code do the labor; automate everything that happens when you run your test file.
+- If you need to test an optional service (S3, etc.), add it to our docker compose file. See :doc:`/container/dev-usage`.
 - Just as with any development, if you’re stuck: ask for help!
 
-To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven <http://maven.apache.org/ref/3.1.0/maven-embedder/cli.html>`_. You should have Maven installed as per the `Development Environment <http://guides.dataverse.org/en/latest/developers/dev-environment.html>`_ guide, but if not it’s easily done via Homebrew: ``brew install maven``.
+To execute existing integration tests on your local Dataverse installation, a helpful command line tool to use is `Maven <https://maven.apache.org/ref/3.1.0/maven-embedder/cli.html>`_. You should have Maven installed as per the `Development Environment <https://guides.dataverse.org/en/latest/developers/dev-environment.html>`_ guide, but if not it’s easily done via Homebrew: ``brew install maven``.
 
 Once installed, you may run commands with ``mvn [options] [<goal(s)>] [<phase(s)>]``.
 
-+ If you want to run just one particular API test, it’s as easy as you think:
++ If you want to run just one particular API test class:
 
-  ``mvn test -Dtest=FileRecordJobIT``
+  ``mvn test -Dtest=UsersIT``
+
++ If you want to run just one particular API test method:
+
+  ``mvn test -Dtest=UsersIT#testMergeAccounts``
 
 + To run more than one test at a time, separate by commas:
 
@@ -277,35 +312,39 @@ To run a test with Testcontainers, you will need to write a JUnit 5 test.
 Please make sure to:
 
 1. End your test class with ``IT``
-2. Provide a ``@Tag("testcontainers")`` to be picked up during testing.
+2. Annotate the test class with two tags:
 
-.. code:: java
+   .. code:: java
 
-   /** A very minimal example for a Testcontainers integration test class. */
-   @Testcontainers
-   @Tag("testcontainers")
-   class MyExampleIT { /* ... */ }
+       /** A very minimal example for a Testcontainers integration test class. */
+       @Testcontainers(disabledWithoutDocker = true)
+       @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST)
+       @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS)
+       class MyExampleIT { /* ... */ }
 
-If using upstream Modules, e.g. for PostgreSQL or similar, you will need to add
+If using upstream modules, e.g. for PostgreSQL or similar, you will need to add
 a dependency to ``pom.xml`` if not present. `See the PostgreSQL module example. <https://www.testcontainers.org/modules/databases/postgres/>`_
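+
+For example, a hedged sketch of a class using the PostgreSQL module might look like this (the container image tag,
+class name, and assertion are illustrative only):
+
+.. code:: java
+
+    @Testcontainers(disabledWithoutDocker = true)
+    @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST)
+    @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS)
+    class MyDatabaseIT {
+
+        // Starts a throwaway PostgreSQL container for the lifetime of this test class.
+        @Container
+        static PostgreSQLContainer<?> postgres = new PostgreSQLContainer<>("postgres:13");
+
+        @Test
+        void containerIsUpAndReachable() {
+            // postgres.getJdbcUrl() could be handed to whatever code is under test
+            assertTrue(postgres.isRunning());
+        }
+    }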
 
 To run these tests, simply call out to Maven:
 
 .. code::
 
-	 mvn -P tc verify
+    mvn verify
+
+Notes:
 
-.. note::
+1. Remember to have Docker ready to serve or tests will fail.
+2. You can skip running unit tests by adding ``-DskipUnitTests``.
+3. You can choose to skip tests that use Testcontainers by adding ``-Dit.groups='integration & !testcontainers'``.
+   Learn more about `filter expressions in the JUnit 5 guide <https://junit.org/junit5/docs/current/user-guide/#running-tests-tag-expressions>`_.
 
-	 1. Remember to have Docker ready to serve or tests will fail.
-	 2. This will not run any unit tests or API tests.
 
-Measuring Coverage of Integration Tests
----------------------------------------
+Measuring Coverage of API Tests
+-------------------------------
 
-Measuring the code coverage of integration tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara5" as the Payara directory and "dataverse" as the Payara Unix user.
+Measuring the code coverage of API tests with Jacoco requires several steps. In order to make these steps clear we'll use "/usr/local/payara6" as the Payara directory and "dataverse" as the Payara Unix user.
 
-Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara 5.
+Please note that this was tested under Glassfish 4 but it is hoped that the same steps will work with Payara.
 
 Add jacocoagent.jar to Payara
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -324,9 +363,9 @@ Note that we are running the following commands as the user "dataverse". In shor
   cd local/jacoco-0.8.1
   wget https://github.com/jacoco/jacoco/releases/download/v0.8.1/jacoco-0.8.1.zip
   unzip jacoco-0.8.1.zip
-  /usr/local/payara5/bin/asadmin stop-domain
-  cp /home/dataverse/local/jacoco-0.8.1/lib/jacocoagent.jar /usr/local/payara5/glassfish/lib
-  /usr/local/payara5/bin/asadmin start-domain
+  /usr/local/payara6/bin/asadmin stop-domain
+  cp /home/dataverse/local/jacoco-0.8.1/lib/jacocoagent.jar /usr/local/payara6/glassfish/lib
+  /usr/local/payara6/bin/asadmin start-domain
 
 Add jacococli.jar to the WAR File
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -349,21 +388,21 @@ Run this as the "dataverse" user.
 
 .. code-block:: bash
 
-  /usr/local/payara5/bin/asadmin deploy dataverse-jacoco.war
+  /usr/local/payara6/bin/asadmin deploy dataverse-jacoco.war
 
-Note that after deployment the file "/usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec" exists and is empty.
+Note that after deployment the file "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" exists and is empty.
 
-Run Integration Tests
-~~~~~~~~~~~~~~~~~~~~~
+Run API Tests
+~~~~~~~~~~~~~
 
 Note that even though you see "docker-aio" in the command below, we assume you are not necessarily running the test suite within Docker. (Some day we'll probably move this script to another directory.) For this reason, we pass the URL with the normal port (8080) that app servers run on to the ``run-test-suite.sh`` script.
 
-Note that "/usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite.
+Note that "/usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec" will become non-empty after you stop and start Payara. You must stop and start Payara before every run of the integration test suite.
 
 .. code-block:: bash
 
-  /usr/local/payara5/bin/asadmin stop-domain
-  /usr/local/payara5/bin/asadmin start-domain
+  /usr/local/payara6/bin/asadmin stop-domain
+  /usr/local/payara6/bin/asadmin start-domain
   git clone https://github.com/IQSS/dataverse.git
   cd dataverse
   conf/docker-aio/run-test-suite.sh http://localhost:8080
@@ -378,7 +417,7 @@ Run these commands as the "dataverse" user. The ``cd dataverse`` means that you
 .. code-block:: bash
 
   cd dataverse
-  java -jar /home/dataverse/local/jacoco-0.8.1/lib/jacococli.jar report --classfiles target/classes --sourcefiles src/main/java --html target/coverage-it/ /usr/local/payara5/glassfish/domains/domain1/config/jacoco.exec
+  java -jar /home/dataverse/local/jacoco-0.8.1/lib/jacococli.jar report --classfiles target/classes --sourcefiles src/main/java --html target/coverage-it/ /usr/local/payara6/glassfish/domains/domain1/config/jacoco.exec
 
 Read Code Coverage Report
 ~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -388,6 +427,8 @@ target/coverage-it/index.html is the place to start reading the code coverage re
 Load/Performance Testing
 ------------------------
 
+.. _locust:
+
 Locust
 ~~~~~~
 
@@ -487,7 +528,7 @@ Future Work on Integration Tests
 - Automate testing of dataverse-client-python: https://github.com/IQSS/dataverse-client-python/issues/10
 - Work with @leeper on testing the R client: https://github.com/IQSS/dataverse-client-r
 - Review and attempt to implement "API Test Checklist" from @kcondon at https://docs.google.com/document/d/199Oq1YwQ4pYCguaeW48bIN28QAitSk63NbPYxJHCCAE/edit?usp=sharing
-- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and http://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/
+- Generate code coverage reports for **integration** tests: https://github.com/pkainulainen/maven-examples/issues/3 and https://www.petrikainulainen.net/programming/maven/creating-code-coverage-reports-for-unit-and-integration-tests-with-the-jacoco-maven-plugin/
 - Consistent logging of API Tests. Show test name at the beginning and end and status codes returned.
 - expected passing and known/expected failing integration tests: https://github.com/IQSS/dataverse/issues/4438
 
@@ -499,7 +540,6 @@ Browser-Based Testing
 Installation Testing
 ~~~~~~~~~~~~~~~~~~~~
 
-- Run `vagrant up` on a server to test the installer
 - Work with @donsizemore to automate testing of https://github.com/GlobalDataverseCommunityConsortium/dataverse-ansible
 
 Future Work on Load/Performance Testing
diff --git a/doc/sphinx-guides/source/developers/tips.rst b/doc/sphinx-guides/source/developers/tips.rst
index bf75a05f84e..764434d1896 100755
--- a/doc/sphinx-guides/source/developers/tips.rst
+++ b/doc/sphinx-guides/source/developers/tips.rst
@@ -19,20 +19,20 @@ Undeploy the war File from the Dataverse Software Installation Script
 
 Because the initial deployment of the war file was done outside of Netbeans by the Dataverse Software installation script, it's a good idea to undeploy that war file to give Netbeans a clean slate to work with.
 
-Assuming you installed Payara in ``/usr/local/payara5``, run the following ``asadmin`` command to see the version of the Dataverse Software that the Dataverse Software installation script deployed:
+Assuming you installed Payara in ``/usr/local/payara6``, run the following ``asadmin`` command to see the version of the Dataverse Software that the Dataverse Software installation script deployed:
 
-``/usr/local/payara5/bin/asadmin list-applications``
+``/usr/local/payara6/bin/asadmin list-applications``
 
 You will probably see something like ``dataverse-5.0 <ejb, web>`` as the output. To undeploy, use whichever version you see like this:
 
-``/usr/local/payara5/bin/asadmin undeploy dataverse-5.0``
+``/usr/local/payara6/bin/asadmin undeploy dataverse-5.0``
 
 Now that Payara doesn't have anything deployed, we can proceed with getting Netbeans set up to deploy the code.
 
 Add Payara as a Server in Netbeans
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara5``. The defaults are fine so you can click "Next" and "Finish".
+Launch Netbeans and click "Tools" and then "Servers". Click "Add Server" and select "Payara Server" and set the installation location to ``/usr/local/payara6``. The defaults are fine so you can click "Next" and "Finish".
 
 Please note that if you are on a Mac, Netbeans may be unable to start Payara due to proxy settings in Netbeans. Go to the "General" tab in Netbeans preferences and click "Test connection" to see if you are affected. If you get a green checkmark, you're all set. If you get a red exclamation mark, change "Proxy Settings" to "No Proxy" and retest. A more complicated answer having to do with changing network settings is available at https://discussions.apple.com/thread/7680039?answerId=30715103022#30715103022 and the bug is also described at https://netbeans.org/bugzilla/show_bug.cgi?id=268076
 
@@ -117,7 +117,7 @@ Deploying With ``asadmin``
 
 Sometimes you want to deploy code without using Netbeans or from the command line on a server you have ssh'ed into.
 
-For the ``asadmin`` commands below, we assume you have already changed directories to ``/usr/local/payara5/glassfish/bin`` or wherever you have installed Payara.
+For the ``asadmin`` commands below, we assume you have already changed directories to ``/usr/local/payara6/glassfish/bin`` or wherever you have installed Payara.
 
 There are four steps to this process:
 
@@ -238,6 +238,8 @@ with the following code in ``SettingsWrapper.java``:
 
 A more serious example would be direct calls to PermissionServiceBean methods used in render logic expressions. This is something that has happened and caused some problems in real life. A simple permission service lookup (for example, whether a user is authorized to create a dataset in the current dataverse) can easily take 15 database queries. Repeated multiple times, this can quickly become a measurable delay in rendering the page. PermissionsWrapper must be used exclusively for any such lookups from JSF pages.
 
+See also :doc:`performance`.
+
 ----
 
 Previous: :doc:`dev-environment` | Next: :doc:`troubleshooting`
diff --git a/doc/sphinx-guides/source/developers/tools.rst b/doc/sphinx-guides/source/developers/tools.rst
index cbd27d6e8d2..9d2740fab6a 100755
--- a/doc/sphinx-guides/source/developers/tools.rst
+++ b/doc/sphinx-guides/source/developers/tools.rst
@@ -25,38 +25,23 @@ Maven
 
 With Maven installed you can run ``mvn package`` and ``mvn test`` from the command line. It can be downloaded from https://maven.apache.org
 
-.. _vagrant:
-
-Vagrant
-+++++++
-
-Vagrant allows you to spin up a virtual machine running the Dataverse Software on your development workstation. You'll need to install Vagrant from https://www.vagrantup.com and VirtualBox from https://www.virtualbox.org.
-
-We assume you have already cloned the repo from https://github.com/IQSS/dataverse as explained in the :doc:`/developers/dev-environment` section.
-
-From the root of the git repo (where the ``Vagrantfile`` is), run ``vagrant up`` and eventually you should be able to reach a Dataverse installation at http://localhost:8888 (the ``forwarded_port`` indicated in the ``Vagrantfile``).
-
-Please note that running ``vagrant up`` for the first time should run the ``downloads/download.sh`` script for you to download required software such as an app server, Solr, etc. However, these dependencies change over time so it's a place to look if ``vagrant up`` was working but later fails.
-
-On Windows if you see an error like ``/usr/bin/perl^M: bad interpreter`` you might need to run ``dos2unix`` on the installation scripts. 
-
 PlantUML
 ++++++++
 
-PlantUML is used to create diagrams in the guides and other places. Download it from http://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``.
+PlantUML is used to create diagrams in the guides and other places. Download it from https://plantuml.com and check out an example script at https://github.com/IQSS/dataverse/blob/v4.6.1/doc/Architecture/components.sh . Note that for this script to work, you'll need the ``dot`` program, which can be installed on Mac with ``brew install graphviz``.
 
 Eclipse Memory Analyzer Tool (MAT)
 ++++++++++++++++++++++++++++++++++
 
 The Memory Analyzer Tool (MAT) from Eclipse can help you analyze heap dumps, showing you "leak suspects" such as seen at https://github.com/payara/Payara/issues/350#issuecomment-115262625
 
-It can be downloaded from http://www.eclipse.org/mat
+It can be downloaded from https://www.eclipse.org/mat
 
 If the heap dump provided to you was created with ``gcore`` (such as with ``gcore -o /tmp/app.core $app_pid``) rather than ``jmap``, you will need to convert the file before you can open it in MAT. Using ``app.core.13849`` as example of the original 33 GB file, here is how you could convert it into a 26 GB ``app.core.13849.hprof`` file. Please note that this operation took almost 90 minutes:
 
 ``/usr/java7/bin/jmap -dump:format=b,file=app.core.13849.hprof /usr/java7/bin/java app.core.13849``
 
-A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also http://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer
+A file of this size may not "just work" in MAT. When you attempt to open it you may see something like "An internal error occurred during: "Parsing heap dump from '/tmp/heapdumps/app.core.13849.hprof'". Java heap space". If so, you will need to increase the memory allocated to MAT. On Mac OS X, this can be done by editing ``MemoryAnalyzer.app/Contents/MacOS/MemoryAnalyzer.ini`` and increasing the value "-Xmx1024m" until it's high enough to open the file. See also https://wiki.eclipse.org/index.php/MemoryAnalyzer/FAQ#Out_of_Memory_Error_while_Running_the_Memory_Analyzer
 
 PageKite
 ++++++++
@@ -73,7 +58,7 @@ The first time you run ``./pagekite.py`` a file at ``~/.pagekite.rc`` will be
 created. You can edit this file to configure PageKite to serve up port 8080
 (the default app server HTTP port) or the port of your choosing.
 
-According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software the meets http://opensource.org/docs/definition.php such as the Dataverse Project.
+According to https://pagekite.net/support/free-for-foss/ PageKite (very generously!) offers free accounts to developers writing software the meets https://opensource.org/docs/definition.php such as the Dataverse Project.
 
 MSV
 +++
@@ -111,7 +96,7 @@ Download SonarQube from https://www.sonarqube.org and start look in the `bin` di
     -Dsonar.test.exclusions='src/test/**,src/main/webapp/resources/**' \
     -Dsonar.issuesReport.html.enable=true \
     -Dsonar.issuesReport.html.location='sonar-issues-report.html' \
-    -Dsonar.jacoco.reportPath=target/jacoco.exec
+    -Dsonar.jacoco.reportPath=target/coverage-reports/jacoco-unit.exec
 
 Once the analysis is complete, you should be able to access http://localhost:9000/dashboard?id=edu.harvard.iq%3Adataverse to see the report. To learn about resource leaks, for example, click on "Bugs", the "Tag", then "leak" or "Rule", then "Resources should be closed".
 
diff --git a/doc/sphinx-guides/source/developers/unf/index.rst b/doc/sphinx-guides/source/developers/unf/index.rst
index 2423877348f..856de209e82 100644
--- a/doc/sphinx-guides/source/developers/unf/index.rst
+++ b/doc/sphinx-guides/source/developers/unf/index.rst
@@ -27,7 +27,7 @@ with Dataverse Software 2.0 and throughout the 3.* lifecycle, UNF v.5
 UNF v.6. Two parallel implementation, in R and Java, will be
 available, for cross-validation.
 
-Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at http://j.mp/2ovSzoT
+Learn more: Micah Altman and Gary King. 2007. “A Proposed Standard for the Scholarly Citation of Quantitative Data.” D-Lib Magazine, 13. Publisher’s Version Copy at https://j.mp/2ovSzoT
 
 **Contents:**
 
diff --git a/doc/sphinx-guides/source/developers/unf/unf-v3.rst b/doc/sphinx-guides/source/developers/unf/unf-v3.rst
index 3f0018d7fa5..98c07b398e0 100644
--- a/doc/sphinx-guides/source/developers/unf/unf-v3.rst
+++ b/doc/sphinx-guides/source/developers/unf/unf-v3.rst
@@ -34,11 +34,11 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n
 
 1. Terminate character strings representing nonmissing values with a POSIX end-of-line character.
 
-2. Encode each character string with `Unicode bit encoding <http://www.unicode.org/versions/Unicode4.0.0/>`_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8.
+2. Encode each character string with `Unicode bit encoding <https://www.unicode.org/versions/Unicode4.0.0/>`_. Versions 3 through 4 use UTF-32BE; Version 4.1 uses UTF-8.
 
 3. Combine the vector of character strings into a single sequence, with each character string separated by a POSIX end-of-line character and a null byte.
 
-4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 <http://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf>`_ for Version 4. The resulting hash is `base64 <http://www.ietf.org/rfc/rfc3548.txt>`_ encoded to support readability.
+4. Compute a hash on the resulting sequence using the standard MD5 hashing algorithm for Version 3 and using `SHA256 <https://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf>`_ for Version 4. The resulting hash is `base64 <https://www.ietf.org/rfc/rfc3548.txt>`_ encoded to support readability.
 
 5. Calculate the UNF for each lower-level data object, using a consistent UNF version and level of precision across the individual UNFs being combined.
 
@@ -49,4 +49,4 @@ For example, the number pi at five digits is represented as -3.1415e+, and the n
 8. Combine UNFs from multiple variables to form a single UNF for an entire data frame, and then combine UNFs for a set of data frames to form a single UNF that represents an entire research study.
 
 Learn more: 
-Software for computing UNFs is available in an R Module, which includes a Windows standalone tool and code for Stata and SAS languages. Also see the following for more details: Micah Altman and Gary King. 2007. "A Proposed Standard for the Scholarly Citation of Quantitative Data," D-Lib Magazine, Vol. 13, No. 3/4 (March). (Abstract: `HTML <http://gking.harvard.edu/files/abs/cite-abs.shtml>`_ | Article: `PDF <http://gking.harvard.edu/files/cite.pdf>`_)
+Software for computing UNFs is available in an R Module, which includes a Windows standalone tool and code for Stata and SAS languages. Also see the following for more details: Micah Altman and Gary King. 2007. "A Proposed Standard for the Scholarly Citation of Quantitative Data," D-Lib Magazine, Vol. 13, No. 3/4 (March). (Abstract: `HTML <https://gking.harvard.edu/files/abs/cite-abs.shtml>`_ | Article: `PDF <https://gking.harvard.edu/files/cite.pdf>`_)
diff --git a/doc/sphinx-guides/source/developers/unf/unf-v6.rst b/doc/sphinx-guides/source/developers/unf/unf-v6.rst
index 9648bae47c8..b2495ff3dd9 100644
--- a/doc/sphinx-guides/source/developers/unf/unf-v6.rst
+++ b/doc/sphinx-guides/source/developers/unf/unf-v6.rst
@@ -156,7 +156,7 @@ For example, to specify a non-default precision the parameter it is specified us
 | Allowed values are {``128`` , ``192`` , ``196`` , ``256``} with ``128`` being the default. 
 | ``R1`` - **truncate** numeric values to ``N`` digits, **instead of rounding**, as previously described.
 
-`Dr. Micah Altman's classic UNF v5 paper <http://www.researchgate.net/publication/200043172_A_Fingerprint_Method_for_Scientific_Data_Verification>`_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project).
+`Dr. Micah Altman's classic UNF v5 paper <https://www.researchgate.net/publication/200043172_A_Fingerprint_Method_for_Scientific_Data_Verification>`_ mentions another optional parameter ``T###``, for specifying rounding of date and time values (implemented as stripping the values of entire components - fractional seconds, seconds, minutes, hours... etc., progressively) - but it doesn't specify its syntax. It is left as an exercise for a curious reader to contact the author and work out the details, if so desired. (Not implemented in UNF Version 6 by the Dataverse Project).
 
 Note: we do not recommend truncating character strings at fewer bytes than the default ``128`` (the ``X`` parameter). At the very least this number **must** be high enough so that the printable UNFs of individual variables or files are not truncated, when calculating combined UNFs of files or datasets, respectively. 
 
diff --git a/doc/sphinx-guides/source/developers/version-control.rst b/doc/sphinx-guides/source/developers/version-control.rst
index aacc245af5a..31fc0a4e602 100644
--- a/doc/sphinx-guides/source/developers/version-control.rst
+++ b/doc/sphinx-guides/source/developers/version-control.rst
@@ -24,7 +24,7 @@ The goals of the Dataverse Software branching strategy are:
 - allow for concurrent development
 - only ship stable code
 
-We follow a simplified "git flow" model described at http://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix".
+We follow a simplified "git flow" model described at https://nvie.com/posts/a-successful-git-branching-model/ involving a "master" branch, a "develop" branch, and feature branches such as "1234-bug-fix".
 
 Branches
 ~~~~~~~~
diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst
index 038f3497495..53578fe980c 100755
--- a/doc/sphinx-guides/source/developers/windows.rst
+++ b/doc/sphinx-guides/source/developers/windows.rst
@@ -2,84 +2,17 @@
 Windows Development
 ===================
 
-Development on Windows is not well supported, unfortunately. You will have a much easier time if you develop on Mac or Linux as described under :doc:`dev-environment` section.
-
-Vagrant commands appear below and were tested on Windows 10 but the Vagrant environment is currently broken. Please see https://github.com/IQSS/dataverse/issues/6849
+Historically, development on Windows has `not been well supported <https://groups.google.com/d/msg/dataverse-community/Hs9j5rIxqPI/-q54751aAgAJ>`_, but as of 2023 a container-based approach is recommended.
 
 .. contents:: |toctitle|
 	:local:
 
-Running the Dataverse Software in Vagrant
------------------------------------------
-
-Install Vagrant
-~~~~~~~~~~~~~~~
-
-Download and install Vagrant from https://www.vagrantup.com
-
-Vagrant advises you to reboot but let's install VirtualBox first.
-
-Install VirtualBox
-~~~~~~~~~~~~~~~~~~
-
-Download and install VirtualBox from https://www.virtualbox.org
-
-Note that we saw an error saying "Oracle VM VirtualBox 5.2.8 Setup Wizard ended prematurely" but then we re-ran the installer and it seemed to work.
-
-Reboot
-~~~~~~
-
-Again, Vagrant asks you to reboot, so go ahead.
-
-Install Git
-~~~~~~~~~~~
-
-Download and install Git from https://git-scm.com
-
-Configure Git to use Unix Line Endings
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Launch Git Bash and run the following commands:
-
-``git config --global core.autocrlf input``
-
-Pro tip: Use Shift-Insert to paste into Git Bash.
-
-See also https://help.github.com/articles/dealing-with-line-endings/
-
-If you skip this step you are likely to see the following error when you run ``vagrant up``.
-
-``/tmp/vagrant-shell: ./install: /usr/bin/perl^M: bad interpreter: No such file or directory``
-
-Clone Git Repo
-~~~~~~~~~~~~~~
-
-From Git Bash, run the following command:
-
-``git clone https://github.com/IQSS/dataverse.git``
-
-vagrant up
-~~~~~~~~~~
-
-From Git Bash, run the following commands:
-
-``cd dataverse``
-
-The ``dataverse`` directory you changed is the one you just cloned. Vagrant will operate on a file called ``Vagrantfile``.
-
-``vagrant up``
-
-After a long while you hopefully will have a Dataverse installation available at http://localhost:8888
-
-Improving Windows Support
--------------------------
-
-Windows Subsystem for Linux
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Running Dataverse in Docker on Windows
+--------------------------------------
 
-We have been unable to get Windows Subsystem for Linux (WSL) to work. We tried following the steps at https://docs.microsoft.com/en-us/windows/wsl/install-win10 but the "Get" button was greyed out when we went to download Ubuntu.
+See the `post <https://groups.google.com/g/dataverse-dev/c/utqkZ7gYsf4/m/4IDtsvKSAwAJ>`_ by Akio Sone for additional details, but please observe the following:
 
-Discussion and Feedback
-~~~~~~~~~~~~~~~~~~~~~~~
+- In Git, the line-ending setting should be set to always use LF (line feed): ``core.autocrlf=input``
+- You must have jq installed: https://jqlang.github.io/jq/download/
 
-For more discussion of Windows support for Dataverse Software development see our community list thread `"Do you want to develop on Windows?" <https://groups.google.com/d/msg/dataverse-community/Hs9j5rIxqPI/-q54751aAgAJ>`_ We would be happy to incorporate feedback from Windows developers into this page. The :doc:`documentation` section describes how.
+Once the above is all set, you can move on to :doc:`/container/dev-usage` in the Container Guide.
diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst
index f6eda53d718..e4eeea9b6d0 100755
--- a/doc/sphinx-guides/source/index.rst
+++ b/doc/sphinx-guides/source/index.rst
@@ -45,7 +45,7 @@ Other Resources
 Additional information about the Dataverse Project itself
 including presentations, information about upcoming releases, data
 management and citation, and announcements can be found at
-`http://dataverse.org/ <http://dataverse.org/>`__
+`https://dataverse.org/ <https://dataverse.org/>`__
 
 **User Group**
 
@@ -68,7 +68,7 @@ The support email address is `support@dataverse.org <mailto:support@dataverse.or
 **Reporting Issues and Contributing**
 
 Report bugs and add feature requests in `GitHub Issues <https://github.com/IQSS/dataverse/issues>`__
-or use `GitHub pull requests <http://guides.dataverse.org/en/latest/developers/version-control.html#how-to-make-a-pull-request>`__,
+or use `GitHub pull requests <https://guides.dataverse.org/en/latest/developers/version-control.html#how-to-make-a-pull-request>`__,
 if you have some code, scripts or documentation that you'd like to share.
 If you have a **security issue** to report, please email `security@dataverse.org <mailto:security@dataverse.org>`__. See also :ref:`reporting-security-issues`.
 
diff --git a/doc/sphinx-guides/source/installation/advanced.rst b/doc/sphinx-guides/source/installation/advanced.rst
index 4f06ed37d01..3de5d0ea07c 100644
--- a/doc/sphinx-guides/source/installation/advanced.rst
+++ b/doc/sphinx-guides/source/installation/advanced.rst
@@ -7,14 +7,16 @@ Advanced installations are not officially supported but here we are at least doc
 .. contents:: |toctitle|
 	:local:
 
+.. _multiple-app-servers:
+
 Multiple App Servers
 --------------------
 
 You should be conscious of the following when running multiple app servers.
 
 - Only one app server can be the dedicated timer server, as explained in the :doc:`/admin/timers` section of the Admin Guide.
-- When users upload a logo or footer for their Dataverse collection using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the app server the user happened to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/payara5/glassfish/domains/domain1/docroot/logos``.
-- When a sitemap is created by an app server it is written to the filesystem of just that app server. By default the sitemap is written to the directory ``/usr/local/payara5/glassfish/domains/domain1/docroot/sitemap``.
+- When users upload a logo or footer for their Dataverse collection using the "theme" feature described in the :doc:`/user/dataverse-management` section of the User Guide, these logos are stored only on the app server the user happened to be on when uploading the logo. By default these logos and footers are written to the directory ``/usr/local/payara6/glassfish/domains/domain1/docroot/logos``.
+- When a sitemap is created by an app server it is written to the filesystem of just that app server. By default the sitemap is written to the directory ``/usr/local/payara6/glassfish/domains/domain1/docroot/sitemap``.
 - If Make Data Count is used, its raw logs must be copied from each app server to single instance of Counter Processor. See also :ref:`:MDCLogPath` section in the Configuration section of this guide and the :doc:`/admin/make-data-count` section of the Admin Guide.
 - Dataset draft version logging occurs separately on each app server. See :ref:`edit-draft-versions-logging` section in Monitoring of the Admin Guide for details.
 - Password aliases (``dataverse.db.password``, etc.) are stored per app server.
@@ -115,3 +117,29 @@ To activate in your Dataverse installation::
 
    curl -X PUT -d '/cgi-bin/zipdownload' http://localhost:8080/api/admin/settings/:CustomZipDownloadServiceUrl
 
+.. _external-exporters:
+
+Installing External Metadata Exporters
+++++++++++++++++++++++++++++++++++++++
+
+As of Dataverse Software 5.14, Dataverse supports the use of external Exporters as a way to add additional metadata
+export formats to Dataverse or replace the built-in formats. This should be considered an **experimental** capability
+in that the mechanism is expected to evolve and using it may require additional effort when upgrading to new Dataverse
+versions.
+
+This capability is enabled by specifying a directory in which Dataverse should look for third-party Exporters. See
+:ref:`dataverse.spi.exporters.directory`.
+
+See :doc:`/developers/metadataexport` for details about how to develop new Exporters.
+
+A minimal example Exporter is available at https://github.com/gdcc/dataverse-exporters. The community is encouraged to
+add additional exporters (and/or links to exporters elsewhere) in this repository. Once you have downloaded the 
+dataverse-spi-export-examples-1.0.0.jar (or other exporter jar), installed it in the directory specified above, and 
+restarted your Payara server, the new exporter should be available. 
+
+The example dataverse-spi-export-examples-1.0.0.jar replaces the ``JSON`` export with a ``MyJSON in <locale>`` version
+that just wraps the existing JSON export object in a new JSON object with the key ``inputJson`` containing the original
+JSON. (Note that the ``MyJSON in <locale>`` label will appear in the dataset Metadata Export download menu immediately,
+but the content for already published datasets will only be updated after you delete the cached exports and/or use a
+reExport API call (see :ref:`batch-exports-through-the-api`).)
+
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index ee89b718777..a7d7905ca4a 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -1,4 +1,3 @@
-=============
 Configuration
 =============
 
@@ -34,6 +33,12 @@ It is very important to keep the block in place for the "admin" endpoint, and to
 
 It's also possible to prevent file uploads via API by adjusting the :ref:`:UploadMethods` database setting.
 
+If you are using a load balancer or a reverse proxy, there are some additional considerations. If no additional configuration is made and the upstream is configured to redirect to localhost, the API will be accessible from the outside, as your installation will register localhost as the origin of any requests to the "admin" and "builtin-users" endpoints. To prevent this, you have two options:
+
+- If your upstream is configured to redirect to localhost, you will need to set the :ref:`JVM option <useripaddresssourceheader>` to one of the following values: ``%client.name% %datetime% %request% %status% %response.length% %header.referer% %header.x-forwarded-for%``, and configure the chosen header on the load balancer side so that it is populated with the client IP address.
+
+- Another solution is to set the upstream to the client IP address. In this case no further configuration is needed.
+
 Forcing HTTPS
 +++++++++++++
 
@@ -137,7 +142,7 @@ The need to redirect port HTTP (port 80) to HTTPS (port 443) for security has al
 
 Your decision to proxy or not should primarily be driven by which features of the Dataverse Software you'd like to use. If you'd like to use Shibboleth, the decision is easy because proxying or "fronting" Payara with Apache is required. The details are covered in the :doc:`shibboleth` section.
 
-Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simply the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including a some `notes used by the Dataverse Project team <https://github.com/IQSS/dataverse/blob/v4.6.1/doc/shib/shib.md>`_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips <http://stackoverflow.com/questions/906402/importing-an-existing-x509-certificate-and-private-key-in-java-keystore-to-use-i>`_ on the web.
+Even if you have no interest in Shibboleth, you may want to front your Dataverse installation with Apache or nginx to simplify the process of installing SSL certificates. There are many tutorials on the Internet for adding certs to Apache, including some `notes used by the Dataverse Project team <https://github.com/IQSS/dataverse/blob/v4.6.1/doc/shib/shib.md>`_, but the process of adding a certificate to Payara is arduous and not for the faint of heart. The Dataverse Project team cannot provide much help with adding certificates to Payara beyond linking to `tips <https://stackoverflow.com/questions/906402/importing-an-existing-x509-certificate-and-private-key-in-java-keystore-to-use-i>`_ on the web.
 
 Still not convinced you should put Payara behind another web server? Even if you manage to get your SSL certificate into Payara, how are you going to run Payara on low ports such as 80 and 443? Are you going to run Payara as root? Bad idea. This is a security risk. Under "Additional Recommendations" under "Securing Your Installation" above you are advised to configure Payara to run as a user other than root.
 
@@ -149,7 +154,7 @@ If you really don't want to front Payara with any proxy (not recommended), you c
 
 ``./asadmin set server-config.network-config.network-listeners.network-listener.http-listener-2.port=443``
 
-What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader. Answers here may be helpful: http://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to
+What about port 80? Even if you don't front your Dataverse installation with Apache, you may want to let Apache run on port 80 just to rewrite HTTP to HTTPS as described above. You can use a similar command as above to change the HTTP port that Payara uses from 8080 to 80 (substitute ``http-listener-1.port=80``). Payara can be used to enforce HTTPS on its own without Apache, but configuring this is an exercise for the reader. Answers here may be helpful: https://stackoverflow.com/questions/25122025/glassfish-v4-java-7-port-unification-error-not-able-to-redirect-http-to
 
 If you are running an installation with Apache and Payara on the same server, and would like to restrict Payara from responding to any requests to port 8080 from external hosts (in other words, not through Apache), you can restrict the AJP listener to localhost only with:
 
@@ -166,39 +171,79 @@ In order for non-superusers to start creating Dataverse collections or datasets,
 
 As the person installing the Dataverse Software, you may or may not be a local metadata expert. You may want to have others sign up for accounts and grant them the "Admin" role at the root Dataverse collection to configure metadata fields, templates, browse/search facets, guestbooks, etc. For more on these topics, consult the :doc:`/user/dataverse-management` section of the User Guide.
 
+.. _pids-configuration:
+
 Persistent Identifiers and Publishing Datasets
 ----------------------------------------------
 
-Persistent identifiers are a required and integral part of the Dataverse Software. They provide a URL that is guaranteed to resolve to the datasets or files they represent. The Dataverse Software currently supports creating identifiers using DOI and Handle.
+Persistent identifiers (PIDs) are a required and integral part of the Dataverse Software. They provide a URL that is
+guaranteed to resolve to the datasets or files they represent. The Dataverse Software currently supports creating
+identifiers using one of several PID providers. The most appropriate PIDs for public data are DOIs (provided by
+DataCite or EZID) and Handles. Dataverse also supports PermaLinks, which could be useful for intranet or catalog use
+cases. A DOI provider called "FAKE" is recommended only for testing and development purposes.
 
-By default, the installer configures a default DOI namespace (10.5072) with DataCite as the registration provider. Please note that as of the release 4.9.3, we can no longer use EZID as the provider. Unlike EZID, DataCite requires that you register for a test account, configured with your own prefix (please contact support@datacite.org). Once you receive the login name, password, and prefix for the account, configure the credentials in your domain.xml, as the following two JVM options::
+Testing PID Providers
++++++++++++++++++++++
+
+By default, the installer configures the DataCite test service as the registration provider. DataCite requires that you
+register for a test account, configured with your own prefix (please contact support@datacite.org).
 
-      <jvm-options>-Ddoi.username=...</jvm-options>
-      <jvm-options>-Ddoi.password=...</jvm-options>
+Once you receive the login name, password, and prefix for the account,
+configure the credentials via :ref:`dataverse.pid.datacite.username` and
+:ref:`dataverse.pid.datacite.password`, then restart Payara.
 
-and restart Payara. The prefix can be configured via the API (where it is referred to as "Authority"):
+Configure the prefix via the API (where it is referred to as :ref:`:Authority`):
 
 ``curl -X PUT -d 10.xxxx http://localhost:8080/api/admin/settings/:Authority``
 
-Once this is done, you will be able to publish datasets and files, but the persistent identifiers will not be citable, and they will only resolve from the DataCite test environment (and then only if the Dataverse installation from which you published them is accessible - DOIs minted from your laptop will not resolve). Note that any datasets or files created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured.
+.. TIP::
+  This testing section is oriented around DataCite but other PID Providers can be tested as well.
+  
+  - EZID is available to University of California scholars and researchers. Testing can be done using the authority 10.5072 and shoulder FK2 with the "apitest" account (contact EZID for credentials) or an institutional account. Configuration in Dataverse is then analogous to using DataCite.
+   
+  - The PermaLink and FAKE DOI providers do not involve an external account. See :ref:`permalinks` and (for the FAKE DOI provider) the :doc:`/developers/dev-environment` section of the Developer Guide.
+
+Once all is configured, you will be able to publish datasets and files, but **the persistent identifiers will not be citable**,
+and they will only resolve from the DataCite test environment (and then only if the Dataverse installation from which
+you published them is accessible - DOIs minted from your laptop will not resolve). Note that any datasets or files
+created using the test configuration cannot be directly migrated and would need to be created again once a valid DOI
+namespace is configured.
 
-To properly configure persistent identifiers for a production installation, an account and associated namespace must be acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider (see https://dataversecommunity.global for more on joining DataCite) but **EZID** (http://ezid.cdlib.org) is an option for the University of California according to https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ . **Handle.Net** (https://www.handle.net) is the HDL provider.
+Once you are done testing, to properly configure persistent identifiers for a production installation, an account and associated namespace must be
+acquired for a fee from a DOI or HDL provider. **DataCite** (https://www.datacite.org) is the recommended DOI provider
+(see https://dataversecommunity.global for more on joining DataCite through the Global Dataverse Community Consortium) but **EZID**
+(http://ezid.cdlib.org) is an option for the University of California according to
+https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/ .
+**Handle.Net** (https://www.handle.net) is the HDL provider.
 
-Once you have your DOI or Handle account credentials and a namespace, configure your Dataverse installation to use them using the JVM options and database settings below.
+Once you have your DOI or Handle account credentials and a namespace, configure your Dataverse installation
+using the JVM options and database settings below.
+
+.. _pids-doi-configuration:
 
 Configuring Your Dataverse Installation for DOIs
 ++++++++++++++++++++++++++++++++++++++++++++++++
 
-By default, your Dataverse installation attempts to register DOIs for each dataset and file under a test authority, though you must apply for your own credentials as explained above.
+As explained above, by default your Dataverse installation attempts to register DOIs for each
+dataset and file under a test authority. You must apply for your own credentials.
 
 Here are the configuration options for DOIs:
 
-**JVM Options:**
+**JVM Options for DataCite:**
+
+- :ref:`dataverse.pid.datacite.mds-api-url`
+- :ref:`dataverse.pid.datacite.rest-api-url`
+- :ref:`dataverse.pid.datacite.username`
+- :ref:`dataverse.pid.datacite.password`
+
+**JVM Options for EZID:**
+
+As stated above, with very few exceptions (e.g. University of California), you will not be able to use
+this provider.
 
-- :ref:`doi.baseurlstring`
-- :ref:`doi.username`
-- :ref:`doi.password`
-- :ref:`doi.dataciterestapiurlstring`
+- :ref:`dataverse.pid.ezid.api-url`
+- :ref:`dataverse.pid.ezid.username`
+- :ref:`dataverse.pid.ezid.password`
 
 **Database Settings:**
 
@@ -208,18 +253,21 @@ Here are the configuration options for DOIs:
 - :ref:`:Shoulder <:Shoulder>`
 - :ref:`:IdentifierGenerationStyle <:IdentifierGenerationStyle>` (optional)
 - :ref:`:DataFilePIDFormat <:DataFilePIDFormat>` (optional)
-- :ref:`:FilePIDsEnabled <:FilePIDsEnabled>` (optional, defaults to true)
+- :ref:`:FilePIDsEnabled <:FilePIDsEnabled>` (optional, defaults to false)
+
+.. _pids-handle-configuration:
 
 Configuring Your Dataverse Installation for Handles
 +++++++++++++++++++++++++++++++++++++++++++++++++++
 
-Here are the configuration options for handles:
+Here are the configuration options for handles. Most notably, you need to
+change the ``:Protocol`` setting, as it defaults to DOI usage.
 
 **JVM Options:**
 
-- :ref:`dataverse.handlenet.admcredfile`
-- :ref:`dataverse.handlenet.admprivphrase`
-- :ref:`dataverse.handlenet.index`
+- :ref:`dataverse.pid.handlenet.key.path`
+- :ref:`dataverse.pid.handlenet.key.passphrase`
+- :ref:`dataverse.pid.handlenet.index`
 
 **Database Settings:**
 
@@ -230,7 +278,33 @@ Here are the configuration options for handles:
 - :ref:`:IndependentHandleService <:IndependentHandleService>` (optional)
 - :ref:`:HandleAuthHandle <:HandleAuthHandle>` (optional)
 
-Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation <http://handle.net/hnr_documentation.html>`_.
+Note: If you are **minting your own handles** and plan to set up your own handle service, please refer to `Handle.Net documentation <https://handle.net/hnr_documentation.html>`_.
+
+.. _permalinks:
+
+Configuring Your Dataverse Installation for PermaLinks
+++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+PermaLinks are a simple mechanism to provide persistent URLs for datasets and datafiles (if configured) that does not involve an external service providing metadata-based search services.
+They are potentially appropriate for Intranet use cases as well as in cases where Dataverse is being used as a catalog or holding duplicate copies of datasets where the authoritative copy already has a DOI or Handle.
+PermaLinks use the protocol "perma" (versus "doi" or "handle") and do not use a "/" character as a separator between the authority and shoulder. It is recommended to choose an alphanumeric value for authority that does not resemble that of DOIs (which are primarily numeric and start with "10." as in "10.5072") to avoid PermaLinks being mistaken for DOIs.
+
+Here are the configuration options for PermaLinks:
+
+**JVM Options:**
+
+- :ref:`dataverse.pid.permalink.base-url`
+
+**Database Settings:**
+
+- :ref:`:Protocol <:Protocol>`
+- :ref:`:Authority <:Authority>`
+- :ref:`:Shoulder <:Shoulder>`
+- :ref:`:IdentifierGenerationStyle <:IdentifierGenerationStyle>` (optional)
+- :ref:`:DataFilePIDFormat <:DataFilePIDFormat>` (optional)
+- :ref:`:FilePIDsEnabled <:FilePIDsEnabled>` (optional, defaults to false)
+
+You must restart Payara after making changes to these settings.
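+
+As a sketch, a minimal PermaLink setup might look like the following (the authority value ``DVN1`` is purely
+illustrative, and the ``base-url`` option is only needed when using an external resolver):
+
+.. code-block:: none
+
+  curl http://localhost:8080/api/admin/settings/:Protocol -X PUT -d perma
+  curl http://localhost:8080/api/admin/settings/:Authority -X PUT -d DVN1
+  ./asadmin create-jvm-options '-Ddataverse.pid.permalink.base-url=https\://dataverse.example.edu'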
 
 .. _auth-modes:
 
@@ -263,6 +337,19 @@ As for the "Remote only" authentication mode, it means that:
 - ``:DefaultAuthProvider`` has been set to use the desired authentication provider
 - The "builtin" authentication provider has been disabled (:ref:`api-toggle-auth-provider`). Note that disabling the "builtin" authentication provider means that the API endpoint for converting an account from a remote auth provider will not work. Converting directly from one remote authentication provider to another (i.e. from GitHub to Google) is not supported. Conversion from remote is always to "builtin". Then the user initiates a conversion from "builtin" to remote. Note that longer term, the plan is to permit multiple login options to the same Dataverse installation account per https://github.com/IQSS/dataverse/issues/3487 (so all this talk of conversion will be moot) but for now users can only use a single login option, as explained in the :doc:`/user/account` section of the User Guide. In short, "remote only" might work for you if you only plan to use a single remote authentication provider such that no conversion between remote authentication providers will be necessary.
 
+.. _bearer-token-auth:
+
+Bearer Token Authentication
+---------------------------
+
+Bearer tokens are defined in `RFC 6750`_ and can be used as an alternative to API tokens. This is an experimental feature hidden behind a feature flag.
+
+.. _RFC 6750: https://tools.ietf.org/html/rfc6750
+
+To enable bearer tokens, you must install and configure Keycloak (for now, see :ref:`oidc-dev` in the Developer Guide) and enable ``api-bearer-auth`` under :ref:`feature-flags`.
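+
+Assuming feature flags are enabled via JVM options as described under :ref:`feature-flags`, turning the flag on
+would look something like this:
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options '-Ddataverse.feature.api-bearer-auth=true'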
+
+You can test that bearer tokens are working by following the example under :ref:`bearer-tokens` in the API Guide.
+
 .. _database-persistence:
 
 Database Persistence
@@ -412,14 +499,18 @@ Logging & Slow Performance
 
 .. _file-storage:
 
-File Storage: Using a Local Filesystem and/or Swift and/or Object Stores and/or Trusted Remote Stores
------------------------------------------------------------------------------------------------------
+File Storage
+------------
+
+By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara6/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\<id\>.directory`` JVM option described below.
 
-By default, a Dataverse installation stores all data files (files uploaded by end users) on the filesystem at ``/usr/local/payara5/glassfish/domains/domain1/files``. This path can vary based on answers you gave to the installer (see the :ref:`dataverse-installer` section of the Installation Guide) or afterward by reconfiguring the ``dataverse.files.\<id\>.directory`` JVM option described below.
+A Dataverse installation can alternatively store files in a Swift or S3-compatible object store, or on a Globus endpoint, and can now be configured to support multiple stores at once. With a multi-store configuration, the location for new files can be controlled on a per-Dataverse collection basis.
 
-A Dataverse installation can alternately store files in a Swift or S3-compatible object store, and can now be configured to support multiple stores at once. With a multi-store configuration, the location for new files can be controlled on a per-Dataverse collection basis.
+A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web- or Globus-accessible trusted remote store.
 
-A Dataverse installation may also be configured to reference some files (e.g. large and/or sensitive data) stored in a web-accessible trusted remote store.
+A Dataverse installation can be configured to allow out-of-band upload by setting the ``dataverse.files.\<id\>.upload-out-of-band`` JVM option to ``true``.
+By default, Dataverse supports uploading files via the :ref:`add-file-api`. With S3 stores, a direct upload process can be enabled to allow sending the file directly to the S3 store (without any intermediate copies on the Dataverse server).
+With the upload-out-of-band option enabled, it is also possible for file upload to be managed manually or via third-party tools, with the :ref:`Adding the Uploaded file to the Dataset <direct-add-to-dataset-api>` API call (described in the :doc:`/developers/s3-direct-upload-api` page) used to add metadata and inform Dataverse that a new file has been added to the relevant store.
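+
+As a sketch, enabling out-of-band upload for a hypothetical store with id ``s3xl`` would look like this:
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options "\-Ddataverse.files.s3xl.upload-out-of-band=true"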
 
 The following sections describe how to set up various types of stores and how to configure for multiple stores.
 
@@ -448,6 +539,27 @@ If you wish to change which store is used by default, you'll need to delete the
 
 It is also possible to set maximum file upload size limits per store. See the :ref:`:MaxFileUploadSizeInBytes` setting below.
 
+.. _labels-file-stores:
+
+Labels for File Stores
+++++++++++++++++++++++
+
+If you find yourself adding many file stores with various configurations such as per-file limits and direct upload, you might find it helpful to make the label descriptive.
+
+For example, instead of simply labeling an S3 store as "S3"...
+
+.. code-block:: none
+
+    ./asadmin create-jvm-options "\-Ddataverse.files.s3xl.label=S3"
+
+... you might want to include some extra information, as in the example below.
+
+.. code-block:: none
+
+    ./asadmin create-jvm-options "\-Ddataverse.files.s3xl.label=S3XL, Filesize limit: 100GB, direct-upload"
+
+Please keep in mind that the UI will only show so many characters, so labels are best kept short.
+
 .. _storage-files-dir:
 
 File Storage
@@ -464,7 +576,7 @@ Multiple file stores should specify different directories (which would nominally
 Swift Storage
 +++++++++++++
 
-Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage <http://swift.openstack.org>`_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container.
+Rather than storing data files on the filesystem, you can opt for an experimental setup with a `Swift Object Storage <https://swift.openstack.org>`_ backend. Each dataset that users create gets a corresponding "container" on the Swift side, and each data file is saved as a file within that container.
 
 **In order to configure a Swift installation,** you need to complete these steps to properly modify the JVM options:
 
@@ -480,7 +592,7 @@ First, run all the following create commands with your Swift endpoint informatio
     ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.<id>.username.endpoint1=your-username"
     ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.files.<id>.endpoint.endpoint1=your-swift-endpoint"
 
-``auth_type`` can either be ``keystone``, ``keystone_v3``, or it will assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``http://rdgw.swift.example.org/swift/v1``.
+``auth_type`` can be either ``keystone`` or ``keystone_v3``; otherwise it is assumed to be ``basic``. ``auth_url`` should be your keystone authentication URL, which includes the tokens (e.g. for keystone, ``https://openstack.example.edu:35357/v2.0/tokens`` and for keystone_v3, ``https://openstack.example.edu:35357/v3/auth/tokens``). ``swift_endpoint`` is a URL that looks something like ``https://rdgw.swift.example.org/swift/v1``.
 
 Then create a password alias by running (without changes):
 
@@ -576,17 +688,17 @@ You'll need an AWS account with an associated S3 bucket for your installation to
 **Make note** of the **bucket's name** and the **region** its data is hosted in.
 
 To **create a user** with full S3 access and nothing more for security reasons, we recommend using IAM
-(Identity and Access Management). See `IAM User Guide <http://docs.aws.amazon.com/IAM/latest/UserGuide/id_users.html>`_
+(Identity and Access Management). See `IAM User Guide <https://docs.aws.amazon.com/IAM/latest/UserGuide/id_users.html>`_
 for more info on this process.
 
-**Generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user.
+To use programmatic access, **generate the user keys** needed for a Dataverse installation afterwards by clicking on the created user.
 (You can skip this step when running on EC2, see below.)
 
 .. TIP::
   If you are hosting your Dataverse installation on an AWS EC2 instance alongside storage in S3, it is possible to use IAM Roles instead
   of the credentials file (the file at ``~/.aws/credentials`` mentioned below). Please note that you will still need the
   ``~/.aws/config`` file to specify the region. For more information on this option, see
-  http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
+  https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2.html
 
 Preparation When Using Custom S3-Compatible Service
 ###################################################
@@ -647,7 +759,7 @@ Additional profiles can be added to these files by appending the relevant inform
   aws_access_key_id = <insert key, no brackets>
   aws_secret_access_key = <insert secret key, no brackets>
 
-Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation <http://docs.aws.amazon.com/cli/latest/userguide/cli-config-files.html>`_:
+Place these two files in a folder named ``.aws`` under the home directory for the user running your Dataverse Installation on Payara. (From the `AWS Command Line Interface Documentation <https://docs.aws.amazon.com/cli/latest/userguide/cli-config-files.html>`_:
 "In order to separate credentials from less sensitive options, region and output format are stored in a separate file
 named config in the same folder")
 
@@ -713,27 +825,28 @@ List of S3 Storage Options
 .. table::
     :align: left
 
-    ===========================================  ==================  ==========================================================================  =============
-    JVM Option                                   Value               Description                                                                 Default value
-    ===========================================  ==================  ==========================================================================  =============
-    dataverse.files.storage-driver-id            <id>                Enable <id> as the default storage driver.                                  ``file``
-    dataverse.files.<id>.type                    ``s3``              **Required** to mark this storage as S3 based.                              (none)
-    dataverse.files.<id>.label                   <?>                 **Required** label to be shown in the UI for this storage                   (none)
-    dataverse.files.<id>.bucket-name             <?>                 The bucket name. See above.                                                 (none)
-    dataverse.files.<id>.download-redirect       ``true``/``false``  Enable direct download or proxy through Dataverse.                          ``false``
-    dataverse.files.<id>.upload-redirect         ``true``/``false``  Enable direct upload of files added to a dataset  to the S3 store.          ``false``
-    dataverse.files.<id>.ingestsizelimit         <size in bytes>     Maximum size of directupload files that should be ingested                  (none)
-    dataverse.files.<id>.url-expiration-minutes  <?>                 If direct uploads/downloads: time until links expire. Optional.             60
-    dataverse.files.<id>.min-part-size           <?>                 Multipart direct uploads will occur for files larger than this. Optional.   ``1024**3``
-    dataverse.files.<id>.custom-endpoint-url     <?>                 Use custom S3 endpoint. Needs URL either with or without protocol.          (none)
-    dataverse.files.<id>.custom-endpoint-region  <?>                 Only used when using custom endpoint. Optional.                             ``dataverse``
-    dataverse.files.<id>.profile                 <?>                 Allows the use of AWS profiles for storage spanning multiple AWS accounts.  (none)
-    dataverse.files.<id>.proxy-url               <?>                 URL of a proxy protecting the S3 store. Optional.                           (none)
-    dataverse.files.<id>.path-style-access       ``true``/``false``  Use path style buckets instead of subdomains. Optional.                     ``false``
-    dataverse.files.<id>.payload-signing         ``true``/``false``  Enable payload signing. Optional                                            ``false``
-    dataverse.files.<id>.chunked-encoding        ``true``/``false``  Disable chunked encoding. Optional                                          ``true``
-    dataverse.files.<id>.connection-pool-size    <?>                 The maximum number of open connections to the S3 server                     ``256``
-    ===========================================  ==================  ==========================================================================  =============
+    ===========================================  ==================  ===================================================================================  =============
+    JVM Option                                   Value               Description                                                                          Default value
+    ===========================================  ==================  ===================================================================================  =============
+    dataverse.files.storage-driver-id            <id>                Enable <id> as the default storage driver.                                           ``file``
+    dataverse.files.<id>.type                    ``s3``              **Required** to mark this storage as S3 based.                                       (none)
+    dataverse.files.<id>.label                   <?>                 **Required** label to be shown in the UI for this storage                            (none)
+    dataverse.files.<id>.bucket-name             <?>                 The bucket name. See above.                                                          (none)
+    dataverse.files.<id>.download-redirect       ``true``/``false``  Enable direct download or proxy through Dataverse.                                   ``false``
+    dataverse.files.<id>.upload-redirect         ``true``/``false``  Enable direct upload of files added to a dataset in the S3 store.                    ``false``
+    dataverse.files.<id>.upload-out-of-band      ``true``/``false``  Allow upload of files by out-of-band methods (using some tool other than Dataverse)  ``false``
+    dataverse.files.<id>.ingestsizelimit         <size in bytes>     Maximum size of direct upload files that should be ingested                          (none)
+    dataverse.files.<id>.url-expiration-minutes  <?>                 If direct uploads/downloads: time until links expire. Optional.                      60
+    dataverse.files.<id>.min-part-size           <?>                 Multipart direct uploads will occur for files larger than this. Optional.            ``1024**3``
+    dataverse.files.<id>.custom-endpoint-url     <?>                 Use custom S3 endpoint. Needs URL either with or without protocol.                   (none)
+    dataverse.files.<id>.custom-endpoint-region  <?>                 Only used when using custom endpoint. Optional.                                      ``dataverse``
+    dataverse.files.<id>.profile                 <?>                 Allows the use of AWS profiles for storage spanning multiple AWS accounts.           (none)
+    dataverse.files.<id>.proxy-url               <?>                 URL of a proxy protecting the S3 store. Optional.                                    (none)
+    dataverse.files.<id>.path-style-access       ``true``/``false``  Use path style buckets instead of subdomains. Optional.                              ``false``
+    dataverse.files.<id>.payload-signing         ``true``/``false``  Enable payload signing. Optional                                                     ``false``
+    dataverse.files.<id>.chunked-encoding        ``true``/``false``  Disable chunked encoding. Optional                                                   ``true``
+    dataverse.files.<id>.connection-pool-size    <?>                 The maximum number of open connections to the S3 server                              ``256``
+    ===========================================  ==================  ===================================================================================  =============
 
 .. table::
     :align: left
@@ -773,7 +886,7 @@ You may provide the values for these via any `supported MicroProfile Config API
 Reported Working S3-Compatible Storage
 ######################################
 
-`Minio v2018-09-12 <http://minio.io>`_
+`Minio v2018-09-12 <https://minio.io>`_
   Set ``dataverse.files.<id>.path-style-access=true``, as Minio works path-based. Works pretty smooth, easy to setup.
   **Can be used for quick testing, too:** just use the example values above. Uses the public (read: unsecure and
   possibly slow) https://play.minio.io:9000 service.
@@ -866,7 +979,7 @@ Once you have configured a trusted remote store, you can point your users to the
     dataverse.files.<id>.type                    ``remote``          **Required** to mark this storage as remote.                                (none)
     dataverse.files.<id>.label                   <?>                 **Required** label to be shown in the UI for this storage.                  (none)
     dataverse.files.<id>.base-url                <?>                 **Required** All files must have URLs of the form <baseUrl>/* .             (none)
-    dataverse.files.<id>.base-store              <?>                 **Optional** The id of a base store (of type file, s3, or swift).           (the default store)
+    dataverse.files.<id>.base-store              <?>                 **Required** The id of a base store (of type file, s3, or swift).           (the default store)
     dataverse.files.<id>.download-redirect       ``true``/``false``  Enable direct download (should usually be true).                            ``false``
     dataverse.files.<id>.secret-key               <?>                 A key used to sign download requests sent to the remote store. Optional.   (none)
     dataverse.files.<id>.url-expiration-minutes  <?>                 If direct downloads and using signing: time until links expire. Optional.   60
@@ -875,6 +988,47 @@ Once you have configured a trusted remote store, you can point your users to the
     
     ===========================================  ==================  ==========================================================================  ===================
 
+.. _globus-storage:
+
+Globus Storage
+++++++++++++++
+
+Globus stores allow Dataverse to manage files stored in Globus endpoints or to reference files in remote Globus endpoints, with users leveraging Globus to transfer files to/from Dataverse (rather than using HTTP/HTTPS).
+See :doc:`/developers/big-data-support` for additional information on how to use a Globus store. Consult the `Globus documentation <https://docs.globus.org/>`_ for information about using Globus and configuring Globus endpoints.
+
+In addition to having the type "globus" and requiring a label, Globus stores share many options with trusted remote stores and add options to specify and access one or more Globus endpoints. As with remote stores, Globus stores also use a base store - a file, S3, or Swift store that can be used to store additional ancillary dataset files (e.g. metadata exports, thumbnails, auxiliary files, etc.).
+These and other available options are described in the table below.
+
+There are two types of Globus stores:
+
+- managed - where Dataverse manages the Globus endpoint, deciding where transferred files are stored and managing access control for users transferring files to/from Dataverse
+- remote - where Dataverse references files that remain on trusted remote Globus endpoints
+
+A managed Globus store connects to a standard/file-based Globus endpoint. It is also possible to configure an S3 store as a managed store, if the managed endpoint uses an underlying S3 store via the Globus S3 Connector.
+With the former, Dataverse has no direct access to the file contents, and functionality related to ingest, fixity hash validation, etc. is not available. With the latter, Dataverse can access files internally via S3 and the functionality supported is similar to that when using S3 direct upload.
+
+Once you have configured a Globus store, or configured an S3 store for Globus access, it is recommended that you install the `dataverse-globus app <https://github.com/scholarsportal/dataverse-globus>`_ to allow transfers in/out of Dataverse to be initiated via the Dataverse user interface. Alternatively, you can point your users to the :doc:`/developers/globus-api` for information about API support.
+
+.. table::
+    :align: left
+
+    =======================================================  ==================  ==========================================================================  ===================
+    JVM Option                                               Value               Description                                                                 Default value
+    =======================================================  ==================  ==========================================================================  ===================
+    dataverse.files.<id>.type                                ``globus``          **Required** to mark this storage as globus enabled.                        (none)
+    dataverse.files.<id>.label                               <?>                 **Required** label to be shown in the UI for this storage.                  (none)
+    dataverse.files.<id>.base-store                          <?>                 **Required** The id of a base store (of type file, s3, or swift).           (the default store)
+    dataverse.files.<id>.remote-store-name                   <?>                 A short name used in the UI to indicate where a file is located. Optional.  (none)
+    dataverse.files.<id>.remote-store-url                    <?>                 A url to an info page about the remote store used in the UI. Optional.      (none)
+    dataverse.files.<id>.managed                             ``true``/``false``  Whether dataverse manages an associated Globus endpoint                     ``false``
+    dataverse.files.<id>.transfer-endpoint-with-basepath     <?>                 The *managed* Globus endpoint id and associated base path for file storage  (none)
+    dataverse.files.<id>.globus-token                        <?>                 A Globus token (base64 encoded <Globus user id>:<Credential>
+                                                                                 for a managed store) - using a microprofile alias is recommended            (none)
+    dataverse.files.<id>.reference-endpoints-with-basepaths  <?>                 A comma separated list of *remote* trusted Globus endpoint id/<basePath>s   (none)
+    dataverse.files.<id>.files-not-accessible-by-dataverse   ``true``/``false``  Should be false for S3 Connector-based *managed* stores, true for others    ``false``
+    
+    =======================================================  ==================  ==========================================================================  ===================
+    
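+
+As a sketch, configuring a *managed* Globus store might look like the following (the store id ``globusm`` is
+purely illustrative, the endpoint id and base path are placeholders, and ``file`` must be the id of an existing
+store to act as the base store):
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options "\-Ddataverse.files.globusm.type=globus"
+  ./asadmin create-jvm-options "\-Ddataverse.files.globusm.label=GlobusManaged"
+  ./asadmin create-jvm-options "\-Ddataverse.files.globusm.base-store=file"
+  ./asadmin create-jvm-options "\-Ddataverse.files.globusm.managed=true"
+  ./asadmin create-jvm-options "\-Ddataverse.files.globusm.transfer-endpoint-with-basepath=<endpoint-id>/<basePath>"
+
+As noted in the table above, the ``globus-token`` credential is best supplied via a MicroProfile alias rather than a
+plain JVM option.
+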
 .. _temporary-file-storage:
 
 Temporary Upload File Storage
@@ -889,7 +1043,7 @@ All of these processes are triggered after finishing transfers over the wire and
 Before being moved there,
 
 - JSF Web UI uploads are stored at :ref:`${dataverse.files.uploads} <dataverse.files.uploads>`, defaulting to
-  ``/usr/local/payara5/glassfish/domains/domain1/uploads`` folder in a standard installation. This place is
+  ``/usr/local/payara6/glassfish/domains/domain1/uploads`` folder in a standard installation. This place is
   configurable and might be set to a separate disk volume where stale uploads are purged periodically.
 - API uploads are stored at the system's temporary files location indicated by the Java system property
   ``java.io.tmpdir``, defaulting to ``/tmp`` on Linux. If this location is backed by a `tmpfs <https://www.kernel.org/doc/html/latest/filesystems/tmpfs.html>`_
@@ -967,7 +1121,7 @@ Custom Navbar Logo
 
 The Dataverse Software allows you to replace the default Dataverse Project icon and name branding in the navbar with your own custom logo. Note that this logo is separate from the logo used in the theme of the root Dataverse collection (see :ref:`theme`).
 
-The custom logo image file is expected to be small enough to fit comfortably in the navbar, no more than 50 pixels in height and 160 pixels in width. Create a ``navbar`` directory in your Payara ``logos`` directory and place your custom logo there. By default, your logo image file will be located at ``/usr/local/payara5/glassfish/domains/domain1/docroot/logos/navbar/logo.png``.
+The custom logo image file is expected to be small enough to fit comfortably in the navbar, no more than 50 pixels in height and 160 pixels in width. Create a ``navbar`` directory in your Payara ``logos`` directory and place your custom logo there. By default, your logo image file will be located at ``/usr/local/payara6/glassfish/domains/domain1/docroot/logos/navbar/logo.png``.
 
 Given this location for the custom logo image file, run this curl command to add it to your settings:
 
@@ -1184,6 +1338,8 @@ The list below depicts a set of tools that can be used to ease the amount of wor
 
 - `easyTranslationHelper <https://github.com/universidadeaveiro/easyTranslationHelper>`_, a tool developed by `University of Aveiro <https://www.ua.pt/>`_.
 
+- `Dataverse General User Interface Translation Guide for Weblate <https://doi.org/10.5281/zenodo.4807371>`_, a guide produced as part of the `SSHOC Dataverse Translation <https://www.sshopencloud.eu/news/workshop-notes-sshoc-dataverse-translation-follow-event/>`_ event.
+
 .. _Web-Analytics-Code:
 
 Web Analytics Code
@@ -1339,24 +1495,25 @@ BagIt file handler configuration settings:
 BagIt Export
 ------------
 
-Your Dataverse installation may be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant <https://www.rd-alliance.org/system/files/Research%20Data%20Repository%20Interoperability%20WG%20-%20Final%20Recommendations_reviewed_0.pdf>`_ zipped `BagIt <https://tools.ietf.org/html/draft-kunze-bagit-17>`_ archival Bags (sometimes called BagPacks) to `Chronopolis <https://libraries.ucsd.edu/chronopolis/>`_ via `DuraCloud <https://duraspace.org/duracloud/>`_ or alternately to any folder on the local filesystem.
+Your Dataverse installation may be configured to submit a copy of published Datasets, packaged as `Research Data Alliance conformant <https://www.rd-alliance.org/system/files/Research%20Data%20Repository%20Interoperability%20WG%20-%20Final%20Recommendations_reviewed_0.pdf>`_ zipped `BagIt <https://tools.ietf.org/html/draft-kunze-bagit-17>`_ archival Bags (sometimes called BagPacks) to one of several supported storage services.
+Supported services include `Chronopolis <https://libraries.ucsd.edu/chronopolis/>`_ via `DuraCloud <https://duraspace.org/duracloud/>`_, Google's Cloud, and any service that can provide an S3 interface or handle files transferred to a folder on the local filesystem.
 
-These archival Bags include all of the files and metadata in a given dataset version and are sufficient to recreate the dataset, e.g. in a new Dataverse instance, or postentially in another RDA-conformant repository.
+These archival Bags include all of the files and metadata in a given dataset version and are sufficient to recreate the dataset, e.g. in a new Dataverse instance, or potentially in another RDA-conformant repository. The `DVUploader <https://github.com/GlobalDataverseCommunityConsortium/dataverse-uploader>`_ includes functionality to recreate a Dataset from an archival Bag produced by Dataverse. (Note that this functionality is distinct from the :ref:`BagIt File Handler`, which is used to upload files to an existing Dataset via the Dataverse user interface.)
 
 The Dataverse Software offers an internal archive workflow which may be configured as a PostPublication workflow via an admin API call to manually submit previously published Datasets and prior versions to a configured archive such as Chronopolis. The workflow creates a `JSON-LD <http://www.openarchives.org/ore/0.9/jsonld>`_ serialized `OAI-ORE <https://www.openarchives.org/ore/>`_ map file, which is also available as a metadata export format in the Dataverse Software web interface.
 
 At present, archiving classes include the DuraCloudSubmitToArchiveCommand, LocalSubmitToArchiveCommand, GoogleCloudSubmitToArchive, and S3SubmitToArchiveCommand , which all extend the AbstractSubmitToArchiveCommand and use the configurable mechanisms discussed below. (A DRSSubmitToArchiveCommand, which works with Harvard's DRS also exists and, while specific to DRS, is a useful example of how Archivers can support single-version-only semantics and support archiving only from specified collections (with collection specific parameters)). 
 
-All current options support the archival status APIs and the same status is available in the dataset page version table (for contributors/those who could view the unpublished dataset, with more detail available to superusers).
+All current options support the :ref:`Archival Status API` calls and the same status is available in the dataset page version table (for contributors/those who could view the unpublished dataset, with more detail available to superusers).
 
 .. _Duracloud Configuration:
 
 Duracloud Configuration
 +++++++++++++++++++++++
 
-Also note that while the current Chronopolis implementation generates the archival Bag and submits it to the archive's DuraCloud interface, the step to make a 'snapshot' of the space containing the archival Bag (and verify it's successful submission) are actions a curator must take in the DuraCloud interface.
+The current Chronopolis implementation generates the archival Bag and submits it to the archive's DuraCloud interface. The steps to make a 'snapshot' of the space containing the archival Bag (and to verify its successful submission) are actions a curator must take in the DuraCloud interface.
 
-The minimal configuration to support an archiver integration involves adding a minimum of two Dataverse Software Keys and any required Payara jvm options. The example instructions here are specific to the DuraCloud Archiver\:
+The minimal configuration to support archiver integration involves adding a minimum of two Dataverse Software settings. Individual archivers may require additional settings and/or Payara JVM options and MicroProfile settings. The example instructions here are specific to the DuraCloud Archiver\:
 
 \:ArchiverClassName - the fully qualified class to be used for archiving. For example:
 
@@ -1366,7 +1523,7 @@ The minimal configuration to support an archiver integration involves adding a m
 
 ``curl http://localhost:8080/api/admin/settings/:ArchiverSettings -X PUT -d ":DuraCloudHost, :DuraCloudPort, :DuraCloudContext, :BagGeneratorThreads"``
 
-The DPN archiver defines three custom settings, one of which is required (the others have defaults):
+The DuraCloud archiver defines three custom settings, one of which is required (the others have defaults):
 
 \:DuraCloudHost - the URL for your organization's Duracloud site. For example:
 
@@ -1432,7 +1589,7 @@ The Google Cloud Archiver also requires a key file that must be renamed to 'goog
 
 For example:
 
-``cp <your key file> /usr/local/payara5/glassfish/domains/domain1/files/googlecloudkey.json``
+``cp <your key file> /usr/local/payara6/glassfish/domains/domain1/files/googlecloudkey.json``
 
 .. _S3 Archiver Configuration:
 
@@ -1513,6 +1670,25 @@ The workflow id returned in this call (or available by doing a GET of /api/admin
 
 Once these steps are taken, new publication requests will automatically trigger submission of an archival copy to the specified archiver, Chronopolis' DuraCloud component in this example. For Chronopolis, as when using the API, it is currently the admin's responsibility to snap-shot the DuraCloud space and monitor the result. Failure of the workflow, (e.g. if DuraCloud is unavailable, the configuration is wrong, or the space for this dataset already exists due to a prior publication action or use of the API), will create a failure message but will not affect publication itself.
 
+.. _bag-info.txt:
+
+Configuring bag-info.txt
+++++++++++++++++++++++++
+
+Out of the box, placeholder values like the ones below will be placed in bag-info.txt:
+
+.. code-block:: text
+
+  Source-Organization: Dataverse Installation (<Site Url>)
+  Organization-Address: <Full address>
+  Organization-Email: <Email address>
+
+To customize these values for your institution, use the following JVM options (an example follows the list):
+
+- :ref:`dataverse.bagit.sourceorg.name`
+- :ref:`dataverse.bagit.sourceorg.address`
+- :ref:`dataverse.bagit.sourceorg.email`
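+
+For example (the organization name is purely illustrative; the address option follows the same pattern):
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options "-Ddataverse.bagit.sourceorg.name=LibraScholar"
+  ./asadmin create-jvm-options "-Ddataverse.bagit.sourceorg.email=support@librascholar.edu"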
+
 Going Live: Launching Your Production Deployment
 ------------------------------------------------
 
@@ -1548,7 +1724,7 @@ You have a couple of options for putting an updated robots.txt file into product
 
 For more of an explanation of ``ProxyPassMatch`` see the :doc:`shibboleth` section.
 
-If you are not fronting Payara with Apache you'll need to prevent Payara from serving the robots.txt file embedded in the war file by overwriting robots.txt after the war file has been deployed. The downside of this technique is that you will have to remember to overwrite robots.txt in the "exploded" war file each time you deploy the war file, which probably means each time you upgrade to a new version of the Dataverse Software. Furthermore, since the version of the Dataverse Software is always incrementing and the version can be part of the file path, you will need to be conscious of where on disk you need to replace the file. For example, for Dataverse Software 4.6.1 the path to robots.txt may be ``/usr/local/payara5/glassfish/domains/domain1/applications/dataverse-4.6.1/robots.txt`` with the version number ``4.6.1`` as part of the path.
+If you are not fronting Payara with Apache you'll need to prevent Payara from serving the robots.txt file embedded in the war file by overwriting robots.txt after the war file has been deployed. The downside of this technique is that you will have to remember to overwrite robots.txt in the "exploded" war file each time you deploy the war file, which probably means each time you upgrade to a new version of the Dataverse Software. Furthermore, since the version of the Dataverse Software is always incrementing and the version can be part of the file path, you will need to be conscious of where on disk you need to replace the file. For example, for Dataverse Software 4.6.1 the path to robots.txt may be ``/usr/local/payara6/glassfish/domains/domain1/applications/dataverse-4.6.1/robots.txt`` with the version number ``4.6.1`` as part of the path.
 
 Creating a Sitemap and Submitting it to Search Engines
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1561,7 +1737,7 @@ Create or update your sitemap by adding the following curl command to cron to ru
 
 This will create or update a file in the following location unless you have customized your installation directory for Payara:
 
-``/usr/local/payara5/glassfish/domains/domain1/docroot/sitemap/sitemap.xml``
+``/usr/local/payara6/glassfish/domains/domain1/docroot/sitemap/sitemap.xml``
 
 On Dataverse installation with many datasets, the creation or updating of the sitemap can take a while. You can check Payara's server.log file for "BEGIN updateSiteMap" and "END updateSiteMap" lines to know when the process started and stopped and any errors in between.
 
@@ -1604,7 +1780,12 @@ When changing values these values with ``asadmin``, you'll need to delete the ol
 
 ``./asadmin create-jvm-options "-Ddataverse.fqdn=dataverse.example.com"``
 
-It's also possible to change these values by stopping Payara, editing ``payara5/glassfish/domains/domain1/config/domain.xml``, and restarting Payara.
+It's also possible to change these values by stopping Payara, editing ``payara6/glassfish/domains/domain1/config/domain.xml``, and restarting Payara.
+
+In addition, JVM options enabled for "MicroProfile Config" (see the docs of each option) can be used with any
+`supported MicroProfile Config API source`_ to provide their values. The most notable source is environment variables;
+many examples are given in the detailed documentation of the enabled options.
+
 
 .. _dataverse.fqdn:
 
@@ -1673,8 +1854,8 @@ protocol, host, and port number and should not include a trailing slash.
 dataverse.files.directory
 +++++++++++++++++++++++++
 
-Please provide an absolute path to a directory backed by some mounted file system. This directory is used for a number
-of purposes:
+Providing an explicit location here makes it easier to reuse a mounted filesystem, and we recommend doing so
+to avoid filling up disks, to aid performance, etc. (see the example after the notes below). This directory is used for a number of purposes:
 
 1. ``<dataverse.files.directory>/temp`` after uploading, data is temporarily stored here for ingest and/or before
    shipping to the final storage destination.
@@ -1687,21 +1868,51 @@ of purposes:
    under certain conditions. This directory may also be used by file stores for :ref:`permanent file storage <storage-files-dir>`,
    but this is controlled by other, store-specific settings.
 
-Defaults to ``/tmp/dataverse``. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DIRECTORY``.
+Notes:
+
+- Please provide an absolute path to a directory backed by some mounted file system.
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DIRECTORY``.
+- Defaults to ``/tmp/dataverse`` in a :doc:`default installation <installation-main>`.
+- Defaults to ``${STORAGE_DIR}`` using our :ref:`Dataverse container <app-locations>` (resolving to ``/dv``).
+- During startup, this directory will be checked for existence and write access. It will be created for you
+  if missing. If it cannot be created or does not have proper write access, application deployment will fail.
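+
+As a sketch (the path is purely illustrative), pointing this at a dedicated mount would look like this:
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options "-Ddataverse.files.directory=/srv/dataverse"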
 
 .. _dataverse.files.uploads:
 
 dataverse.files.uploads
 +++++++++++++++++++++++
 
-Configure a folder to store the incoming file stream during uploads (before transfering to `${dataverse.files.directory}/temp`).
+Configure a folder to store the incoming file stream during uploads (before transferring to ``${dataverse.files.directory}/temp``).
+Providing an explicit location here makes it easier to reuse a mounted filesystem.
 Please also see :ref:`temporary-file-storage` for more details.
-You can use an absolute path or a relative, which is relative to the application server domain directory.
 
-Defaults to ``./uploads``, which resolves to ``/usr/local/payara5/glassfish/domains/domain1/uploads`` in a default
-installation.
+Notes:
+
+- Please provide an absolute path to a directory backed by some mounted file system.
+- Defaults to ``${com.sun.aas.instanceRoot}/uploads`` in a :doc:`default installation <installation-main>`
+  (resolving to ``/usr/local/payara6/glassfish/domains/domain1/uploads``).
+- Defaults to ``${STORAGE_DIR}/uploads`` using our :ref:`Dataverse container <app-locations>` (resolving to ``/dv/uploads``).
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``.
+- During startup, this directory will be checked for existence and write access. It will be created for you
+  if missing. If it cannot be created or does not have proper write access, application deployment will fail.
 
-Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``.
+.. _dataverse.files.docroot:
+
+dataverse.files.docroot
++++++++++++++++++++++++
+
+Configure a folder to store and retrieve additional materials like user uploaded collection logos, generated sitemaps,
+and so on. Providing an explicit location here makes it easier to reuse a mounted filesystem.
+See also logo customization above.
+
+Notes:
+
+- Defaults to ``${com.sun.aas.instanceRoot}/docroot`` in a :doc:`default installation <installation-main>`
+  (resolves to ``/usr/local/payara6/glassfish/domains/domain1/docroot``).
+- Defaults to ``${STORAGE_DIR}/docroot`` using our :ref:`Dataverse container <app-locations>` (resolving to ``/dv/docroot``).
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DOCROOT``.
+- During startup, this directory will be checked for existence and write access. It will be created for you
+  if missing. If it cannot be created or does not have proper write access, application deployment will fail.
 
 dataverse.auth.password-reset-timeout-in-minutes
 ++++++++++++++++++++++++++++++++++++++++++++++++
@@ -1900,92 +2111,255 @@ dataverse.dataAccess.thumbnail.pdf.limit
 
 For limiting the size (in bytes) of thumbnail images generated from files. The default is 1000000 bytes (1 MB).
 
-.. _doi.baseurlstring:
-
-doi.baseurlstring
-+++++++++++++++++
 
-As of this writing, "https://mds.datacite.org" (DataCite) and "https://ezid.cdlib.org" (EZID) are the main valid values.
+.. _dataverse.pid.datacite.mds-api-url:
 
-Out of the box, the Dataverse Software is configured to use a test MDS DataCite base URL string. You can delete it like this:
+dataverse.pid.datacite.mds-api-url
+++++++++++++++++++++++++++++++++++
 
-``./asadmin delete-jvm-options '-Ddoi.baseurlstring=https\://mds.test.datacite.org'``
+Configure the base URL of the `DataCite MDS API <https://support.datacite.org/reference/overview>`_,
+used to mint and manage DOIs. Valid values are "https://mds.datacite.org" and "https://mds.test.datacite.org"
+(see also note below).
 
-Then, to switch to production DataCite, you can issue the following command:
+Out of the box, the installer configures your installation to use DataCite's MDS test API base URL (see DataCite's `testing guide <https://support.datacite.org/docs/testing-guide>`_). You can delete it like this:
 
-``./asadmin create-jvm-options '-Ddoi.baseurlstring=https\://mds.datacite.org'``
+``./asadmin delete-jvm-options '-Ddataverse.pid.datacite.mds-api-url=https\://mds.test.datacite.org'``
 
-See also these related database settings below:
+Then, to switch to the production DataCite base URL (see the `DataCite MDS API Guide <https://support.datacite.org/docs/mds-api-guide>`_), you can issue the following command:
 
-- :ref:`:DoiProvider`
-- :ref:`:Protocol`
-- :ref:`:Authority`
-- :ref:`:Shoulder`
+``./asadmin create-jvm-options '-Ddataverse.pid.datacite.mds-api-url=https\://mds.datacite.org'``
 
-.. _doi.dataciterestapiurlstring:
+If this option is not set, it defaults to the testing API endpoint.
 
-doi.dataciterestapiurlstring
-++++++++++++++++++++++++++++
+**Notes:**
 
-This configuration option affects the ``updateCitationsForDataset`` API endpoint documented under :ref:`MDC-updateCitationsForDataset` in the Admin Guide as well as the /pids/* API.
+- See also these related database settings below: :ref:`:DoiProvider`, 
+  :ref:`:Protocol`, :ref:`:Authority`, :ref:`:Shoulder`.
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment
+  variable ``DATAVERSE_PID_DATACITE_MDS_API_URL``.
+- This setting was formerly known as ``doi.baseurlstring`` and has been renamed.
+  You should delete and re-add it.
+- While using DataCite directly is recommended because it is tested by the Dataverse
+  Project Team and field-tested with most installations, it is also possible
+  to use a DataCite Client API as a proxy to DataCite. `Since the launch of DataCite Fabrica in
+  2019, the only example by Australian National Data Services (ANDS) has been decommissioned
+  <https://intranet.ands.org.au/display/DOC/DOI+Technical+Changes%3A+Transition+to+DataCite+DOI+services>`_.
 
-As of this writing, "https://api.datacite.org" (DataCite) and "https://api.test.datacite.org" (DataCite Testing) are the main valid values.
 
-Out of the box, the Dataverse Software is configured to use a test DataCite REST API base URL string. You can delete it like this:
+.. _dataverse.pid.datacite.rest-api-url:
 
-``./asadmin delete-jvm-options '-Ddoi.dataciterestapiurlstring=https\://api.test.datacite.org'``
+dataverse.pid.datacite.rest-api-url
++++++++++++++++++++++++++++++++++++
 
-Then, to switch to production DataCite, you can issue the following command:
+Configure the base URL endpoint of the `DataCite REST API <https://support.datacite.org/reference/introduction>`_, used for
+:ref:`PIDs API <pids-api>` information retrieval and :doc:`/admin/make-data-count`.
 
-``./asadmin create-jvm-options '-Ddoi.dataciterestapiurlstring=https\://api.datacite.org'``
+Valid values are "https://api.datacite.org" and "https://api.test.datacite.org". When unset, the default is the testing API endpoint.
 
-For backward compatibility, if this option is not defined, the value of '-Ddoi.mdcbaseurlstring' is used if set. If not the default used is "https\://api.datacite.org:.
+Out of the box, the installer configures your installation to use a DataCite REST test base URL (see DataCite's `testing guide <https://support.datacite.org/docs/testing-guide>`_). You can delete it like this:
 
-See also these related database settings below:
+``./asadmin delete-jvm-options '-Ddataverse.pid.datacite.rest-api-url=https\://api.test.datacite.org'``
 
-- :ref:`:MDCLogPath`
-- :ref:`:DisplayMDCMetrics`
+Then, to switch to the production DataCite base URL (see the `DataCite REST API Guide <https://support.datacite.org/docs/api>`_),
+you can issue the following command:
 
-.. _doi.username:
+``./asadmin create-jvm-options '-Ddataverse.pid.datacite.rest-api-url=https\://api.datacite.org'``
 
-doi.username
-++++++++++++
+**Notes:**
 
-Used in conjuction with ``doi.baseurlstring``.
+- See also these related database settings below: :ref:`:MDCLogPath`,
+  :ref:`:DisplayMDCMetrics`.
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment
+  variable ``DATAVERSE_PID_DATACITE_REST_API_URL``.
+- This setting was formerly known as ``doi.dataciterestapiurlstring`` or
+  ``doi.mdcbaseurlstring`` and has been renamed. You should delete these and re-add it (once) under the new name.
 
-Once you have a username from your provider, you can enter it like this:
+.. _dataverse.pid.datacite.username:
 
-``./asadmin create-jvm-options '-Ddoi.username=YOUR_USERNAME_HERE'``
+dataverse.pid.datacite.username
++++++++++++++++++++++++++++++++
 
-.. _doi.password:
+DataCite uses `HTTP Basic authentication <https://en.wikipedia.org/wiki/Basic_access_authentication>`_
+for `Fabrica <https://doi.datacite.org/>`_ and their APIs. You need to provide
+the same credentials to the Dataverse software so it can mint and manage DOIs for you.
 
-doi.password
-++++++++++++
+Once you have a username from DataCite, you can enter it like this:
 
-Used in conjuction with ``doi.baseurlstring``.
+``./asadmin create-jvm-options '-Ddataverse.pid.datacite.username=YOUR_USERNAME_HERE'``
 
-Once you have a password from your provider, you can enter it like this:
+**Notes:**
 
-``./asadmin create-jvm-options '-Ddoi.password=YOUR_PASSWORD_HERE'``
+- Used in conjunction with :ref:`dataverse.pid.datacite.mds-api-url`,
+  :ref:`dataverse.pid.datacite.rest-api-url` and :ref:`dataverse.pid.datacite.password`.
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment
+  variable ``DATAVERSE_PID_DATACITE_USERNAME``.
+- This setting was formerly known as ``doi.username`` and has been renamed.
+  You should delete and re-add it.
 
-.. _dataverse.handlenet.admcredfile:
+.. _dataverse.pid.datacite.password:
 
-dataverse.handlenet.admcredfile
+dataverse.pid.datacite.password
 +++++++++++++++++++++++++++++++
 
-If you're using **handles**, this JVM setting configures access credentials so your Dataverse installation can talk to your Handle.Net server. This is the private key generated during Handle.Net server installation. Typically the full path is set to ``handle/svr_1/admpriv.bin``. Please refer to `Handle.Net's documentation <http://handle.net/hnr_documentation.html>`_ for more info.
+Once you have a password from your provider, you should create a password alias.
+This avoids storing it in clear text, although you could use a JVM option `to reference
+a different place <https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Server%20Configuration%20And%20Management/Configuration%20Options/Variable%20Substitution/Types%20of%20Variables.html>`__.
 
-.. _dataverse.handlenet.admprivphrase:
+``./asadmin create-password-alias dataverse.pid.datacite.password``
 
-dataverse.handlenet.admprivphrase
-+++++++++++++++++++++++++++++++++
-This JVM setting is also part of **handles** configuration. The Handle.Net installer lets you choose whether to encrypt the admcredfile private key or not. If you do encrypt it, this is the pass phrase that it's encrypted with.
+This will allow you to enter the password without echoing the characters.
+To manage these, read up on `Payara docs about password aliases <https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Server%20Configuration%20And%20Management/Configuration%20Options/Password%20Aliases.html#asadmin-commands-password-aliases>`__.
 
-.. _dataverse.handlenet.index:
+**Notes:**
 
-dataverse.handlenet.index
-+++++++++++++++++++++++++
-If you want to use different index than the default 300
+- Used in conjunction with :ref:`dataverse.pid.datacite.mds-api-url`,
+  :ref:`dataverse.pid.datacite.rest-api-url` and :ref:`dataverse.pid.datacite.username`.
+- Can also be set via *MicroProfile Config API* sources, e.g. the environment
+  variable ``DATAVERSE_PID_DATACITE_PASSWORD`` (although you shouldn't use
+  environment variables for passwords).
+- This setting was formerly known as ``doi.password`` and has been renamed.
+  You should delete the old JVM option and the wrapped password alias, then recreate
+  with new alias name as above.
+
+
+
+.. _dataverse.pid.handlenet.key.path:
+
+dataverse.pid.handlenet.key.path
+++++++++++++++++++++++++++++++++
+
+Related to :ref:`Handle.Net PID provider usage <pids-handle-configuration>`.
+
+Provide an absolute path to a private key file authenticating requests to your
+Handle.Net server.
+
+Handle.Net servers use a public key authentication method where the public key
+is stored in a handle itself and the matching private key is provided from this
+file. Typically, the absolute path ends in ``handle/svr_1/admpriv.bin``. See
+also chapter 1.4 "Authentication" of the `Handle.Net Technical Documentation
+<http://www.handle.net/tech_manual/HN_Tech_Manual_8.pdf>`__.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_HANDLENET_KEY_PATH``. This setting was formerly known
+as ``dataverse.handlenet.admcredfile`` and has been renamed. You should delete
+and re-add it.
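+
+For example (the path is illustrative and depends on where your Handle.Net installation keeps its private key):
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options '-Ddataverse.pid.handlenet.key.path=/opt/handle/svr_1/admpriv.bin'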
+
+
+.. _dataverse.pid.handlenet.key.passphrase:
+
+dataverse.pid.handlenet.key.passphrase
+++++++++++++++++++++++++++++++++++++++
+
+Related to :ref:`Handle.Net PID provider usage <pids-handle-configuration>`.
+
+Provide a passphrase to decrypt the :ref:`private key file <dataverse.pid.handlenet.key.path>`.
+
+The key file may (and should) be encrypted with a passphrase (used for
+encryption with AES-128). See also chapter 1.4 "Authentication" of the
+`Handle.Net Technical Documentation <http://www.handle.net/tech_manual/HN_Tech_Manual_8.pdf>`__.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_HANDLENET_KEY_PASSPHRASE`` (although you shouldn't use
+environment variables for passwords). This setting was formerly known as
+``dataverse.handlenet.admprivphrase`` and has been renamed. You should delete
+the old JVM option and the wrapped password alias, then recreate as shown for
+:ref:`dataverse.pid.datacite.password` but with this option as alias name.
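+
+Following that pattern, creating the alias would look like this:
+
+.. code-block:: none
+
+  ./asadmin create-password-alias dataverse.pid.handlenet.key.passphrase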
+
+
+.. _dataverse.pid.handlenet.index:
+
+dataverse.pid.handlenet.index
++++++++++++++++++++++++++++++
+
+Related to :ref:`Handle.Net PID provider usage <pids-handle-configuration>`.
+
+Configure your *Handle.Net Index* to be used when registering new persistent
+identifiers. Defaults to ``300``.
+
+Indices are used to separate concerns within the Handle system. To add data to
+an index, authentication is mandatory. See also chapter 1.4 "Authentication" of
+the `Handle.Net Technical Documentation <http://www.handle.net/tech_manual/HN_Tech_Manual_8.pdf>`__.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_HANDLENET_INDEX``. This setting was formerly known as
+``dataverse.handlenet.index`` and has been renamed. You should delete and
+re-add it.
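+
+For example, to use an index other than the default (the value is illustrative and must match the index assigned
+to your administrative handle):
+
+.. code-block:: none
+
+  ./asadmin create-jvm-options '-Ddataverse.pid.handlenet.index=320'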
+
+.. _dataverse.pid.permalink.base-url:
+
+dataverse.pid.permalink.base-url
+++++++++++++++++++++++++++++++++
+
+When using :ref:`PermaLinks <permalinks>`, this setting can be used to configure an external resolver. Dataverse will associate a PermaLink PID with the URL:
+``<dataverse.pid.permalink.base-url>/citation?persistentId=perma:<permalink>``. The default value is your Dataverse site URL, which will result in PermaLinks correctly resolving to the appropriate dataset page.
+
+To set this option, issue a command such as:
+
+``./asadmin create-jvm-options '-Ddataverse.pid.permalink.base-url=https\://localresolver.yourdataverse.org'``
+
+See also these related database settings:
+
+- :ref:`:Protocol`
+- :ref:`:Authority`
+- :ref:`:Shoulder`
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_PERMALINK_BASE_URL``. This setting was formerly known as
+``perma.baseurlstring`` and has been renamed. You should delete and re-add it.
+
+.. _dataverse.pid.ezid.api-url:
+
+dataverse.pid.ezid.api-url
+++++++++++++++++++++++++++
+
+The EZID DOI provider is likely not an option if you are `not associated with
+California Digital Library (CDL) or Purdue University 
+<https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/>`_.
+
+Defaults to ``https://ezid.cdlib.org``.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_EZID_API_URL``. This setting was formerly known as
+``doi.baseurlstring`` and has been renamed. You should delete and re-add it.
+
+.. _dataverse.pid.ezid.username:
+
+dataverse.pid.ezid.username
++++++++++++++++++++++++++++
+
+The EZID DOI provider is likely not an option if you are `not associated with
+California Digital Library (CDL) or Purdue University 
+<https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/>`_.
+
+Works the same way as :ref:`dataverse.pid.datacite.username`, but for the EZID DOI
+provider.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_EZID_USERNAME``.
+
+This setting was formerly known as ``doi.username`` and has been renamed. You
+should delete and re-add it.
+
+.. _dataverse.pid.ezid.password:
+
+dataverse.pid.ezid.password
++++++++++++++++++++++++++++
+
+The EZID DOI provider is likely not an option if you are `not associated with
+California Digital Library (CDL) or Purdue University 
+<https://www.cdlib.org/cdlinfo/2017/08/04/ezid-doi-service-is-evolving/>`_.
+
+Works the same way as :ref:`dataverse.pid.datacite.password`, but for the EZID DOI
+provider.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment
+variable ``DATAVERSE_PID_EZID_PASSWORD`` (although you shouldn't use
+environment variables for passwords). 
+
+This setting was formerly known as ``doi.password`` and has been renamed. You
+should delete the old JVM option and the wrapped password alias, then recreate
+as shown for :ref:`dataverse.pid.datacite.password` but with the EZID alias
+name.
 
 .. _dataverse.timerServer:
 
@@ -2098,7 +2472,178 @@ Can also be set via any `supported MicroProfile Config API source`_, e.g. the en
 **WARNING:** For security, do not use the sources "environment variable" or "system property" (JVM option) in a
 production context! Rely on password alias, secrets directory or cloud based sources instead!
 
+.. _dataverse.api.allow-incomplete-metadata:
+
+dataverse.api.allow-incomplete-metadata
++++++++++++++++++++++++++++++++++++++++
+
+When enabled, datasets with incomplete metadata can be submitted via API for later correction.
+See :ref:`create-dataset-command` for details.
+
+Defaults to ``false``.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable
+``DATAVERSE_API_ALLOW_INCOMPLETE_METADATA``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions.
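+
+For example, following the JVM option pattern used elsewhere in this guide, it could be enabled with:
+
+``./asadmin create-jvm-options '-Ddataverse.api.allow-incomplete-metadata=true'``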
+
+.. _dataverse.signposting.level1-author-limit:
+
+dataverse.signposting.level1-author-limit
++++++++++++++++++++++++++++++++++++++++++
+
+See :ref:`discovery-sign-posting` for details.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_SIGNPOSTING_LEVEL1_AUTHOR_LIMIT``.
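+
+For example, a hypothetical limit of 20 could be supplied via an environment variable before starting Payara:
+
+``export DATAVERSE_SIGNPOSTING_LEVEL1_AUTHOR_LIMIT=20``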
+
+.. _dataverse.signposting.level1-item-limit:
+
+dataverse.signposting.level1-item-limit
++++++++++++++++++++++++++++++++++++++++
+
+See :ref:`discovery-sign-posting` for details.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_SIGNPOSTING_LEVEL1_ITEM_LIMIT``.
+
+dataverse.mail.support-email
+++++++++++++++++++++++++++++
+
+This provides an email address, distinct from the :ref:`systemEmail`, that will be used for contact forms and the Feedback API. This address is used as the To address when the contact form is launched from the Support entry in the top navigation bar and, if configured via :ref:`dataverse.mail.cc-support-on-contact-email`, as a CC address when the form is launched from a Dataverse/Dataset Contact button.
+This allows a no-reply address to be configured as the :ref:`systemEmail` while feedback is sent (or cc'd) to the support email address, which would normally accept replies. If not set, the :ref:`systemEmail` is used for the Feedback API/contact form email.
+
+Note that only the email address is required and it can be supplied without the ``<`` and ``>`` signs. If you do include a display name before the angle brackets, it is used to customize the name of your support team, which appears in the "from" address of emails as well as in help text in the UI. If you don't include the text, the installation name (see :ref:`Branding Your Installation`) will appear in the "from" address.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable ``DATAVERSE_MAIL_SUPPORT_EMAIL``.
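+
+For example (the address and display name below are placeholders), the value could be supplied via an environment variable:
+
+``export DATAVERSE_MAIL_SUPPORT_EMAIL='Support Team <support@dataverse.example.edu>'``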
+
+.. _dataverse.mail.cc-support-on-contact-email:
+
+dataverse.mail.cc-support-on-contact-email
+++++++++++++++++++++++++++++++++++++++++++
+
+If this setting is true, the contact forms and Feedback API will cc the system support address (the ``dataverse.mail.support-email`` address if set, the :ref:`systemEmail` if not) when sending email to the collection, dataset, or datafile contacts.
+A CC line is added to the contact form when this setting is true so that users are aware that the cc will occur.
+The default is false.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_MAIL_CC_SUPPORT_ON_CONTACT_EMAIL``.
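+
+For example, following the JVM option pattern used elsewhere in this guide:
+
+``./asadmin create-jvm-options '-Ddataverse.mail.cc-support-on-contact-email=true'``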
+
+dataverse.ui.allow-review-for-incomplete
+++++++++++++++++++++++++++++++++++++++++
+
+Determines whether datasets submitted via API with incomplete metadata (for later correction) can be submitted for review
+from the UI.
+
+Defaults to ``false``.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable
+``DATAVERSE_UI_ALLOW_REVIEW_FOR_INCOMPLETE``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions.
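+
+For example, to allow review of such datasets via a JVM option:
+
+``./asadmin create-jvm-options '-Ddataverse.ui.allow-review-for-incomplete=true'``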
+
+dataverse.ui.show-validity-filter
++++++++++++++++++++++++++++++++++
+
+When enabled, a filter on the validity of metadata is shown on the "My Data" page.
+**Note:** Before using this filter, you must reindex the datasets; otherwise, datasets with valid metadata
+will not be shown in the results.
+
+Defaults to ``false``.
+
+Can also be set via any `supported MicroProfile Config API source`_, e.g. the environment variable
+``DATAVERSE_UI_SHOW_VALIDITY_FILTER``. Will accept ``[tT][rR][uU][eE]|1|[oO][nN]`` as "true" expressions.
+
+.. _dataverse.spi.exporters.directory:
+
+dataverse.spi.exporters.directory
++++++++++++++++++++++++++++++++++
+
+This JVM option is used to configure the file system path where external Exporter JARs can be placed. See :ref:`external-exporters` for more information.
+
+``./asadmin create-jvm-options '-Ddataverse.spi.exporters.directory=PATH_LOCATION_HERE'``
+
+If this value is set, Dataverse will examine all JARs in the specified directory and will use them to add, or replace existing, metadata export formats.
+If this value is not set (the default), Dataverse will not use external Exporters.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_SPI_EXPORTERS_DIRECTORY``.
+
+.. _dataverse.netcdf.geo-extract-s3-direct-upload:
+
+dataverse.netcdf.geo-extract-s3-direct-upload
++++++++++++++++++++++++++++++++++++++++++++++
+
+This setting was added to keep S3 direct upload lightweight. When that feature is enabled and you still want NetCDF and HDF5 files to go through metadata extraction of a Geospatial Bounding Box (see :ref:`netcdf-and-hdf5`), which requires the file to be downloaded from S3 in this scenario, set this option to true.
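+
+For example, to turn the extraction back on when S3 direct upload is enabled:
+
+``./asadmin create-jvm-options '-Ddataverse.netcdf.geo-extract-s3-direct-upload=true'``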
+
+See also :ref:`s3-direct-upload-features-disabled`.
+
+.. _dataverse.storageuse.disable-storageuse-increments:
+
+dataverse.storageuse.disable-storageuse-increments
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+This setting serves as an emergency "kill switch" that disables maintaining the real-time record of storage use for all the datasets and collections in the database. Because this feature (see :doc:`/admin/collectionquotas`) is experimental and has not been used in a production setting as of this release (v6.1), the setting is provided in case these updates start causing database race conditions and conflicts on a busy server.
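+
+For example, to disable the storage use updates via a JVM option:
+
+``./asadmin create-jvm-options '-Ddataverse.storageuse.disable-storageuse-increments=true'``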
+
+dataverse.auth.oidc.*
++++++++++++++++++++++
+
+Provision a single :doc:`OpenID Connect authentication provider <oidc>` using MicroProfile Config. You can find a list of
+all available options at :ref:`oidc-mpconfig`.
+
+.. _dataverse.files.guestbook-at-request:
+
+dataverse.files.guestbook-at-request
+++++++++++++++++++++++++++++++++++++
+
+This setting enables functionality to display guestbooks either when a user requests access to a restricted data file or when a file is downloaded (the historic default). Providing a true/false value for this setting enables the functionality and provides a global default. The behavior can also be changed at the collection level via the user interface and by a superuser for a given dataset using the API.
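+
+For example, to make guestbooks appear at access request time instance-wide:
+
+``./asadmin create-jvm-options '-Ddataverse.files.guestbook-at-request=true'``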
+
+See also :ref:`guestbook-at-request-api` in the API Guide.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_GUESTBOOK_AT_REQUEST``.
+
+.. _dataverse.bagit.sourceorg.name:
+
+dataverse.bagit.sourceorg.name
+++++++++++++++++++++++++++++++
+
+The name for your institution that you'd like to appear in bag-info.txt. See :ref:`bag-info.txt`.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_BAGIT_SOURCEORG_NAME``.
+
+.. _dataverse.bagit.sourceorg.address:
+
+dataverse.bagit.sourceorg.address
++++++++++++++++++++++++++++++++++
+
+The mailing address for your institution that you'd like to appear in bag-info.txt. See :ref:`bag-info.txt`. The example in https://datatracker.ietf.org/doc/html/rfc8493 uses commas as separators: ``1 Main St., Cupertino, California, 11111``.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_BAGIT_SOURCEORG_ADDRESS``.
+
+.. _dataverse.bagit.sourceorg.email:
+
+dataverse.bagit.sourceorg.email
++++++++++++++++++++++++++++++++
+
+The email for your institution that you'd like to appear in bag-info.txt. See :ref:`bag-info.txt`.
+
+Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_BAGIT_SOURCEORG_EMAIL``.
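+
+For example, all three Bag metadata values could be supplied via environment variables (the values below are placeholders; the address reuses the RFC 8493 example):
+
+``export DATAVERSE_BAGIT_SOURCEORG_NAME='Example University Library'``
+
+``export DATAVERSE_BAGIT_SOURCEORG_ADDRESS='1 Main St., Cupertino, California, 11111'``
+
+``export DATAVERSE_BAGIT_SOURCEORG_EMAIL='support@example.edu'``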
+
+.. _feature-flags:
+
+Feature Flags
+-------------
+
+Certain features might be deactivated because they are experimental and/or opt-in previews. If you want to enable these,
+please find all known feature flags below. Any of these flags can be activated using a boolean value
+(case-insensitive, one of "true", "1", "YES", "Y", "ON") for the setting.
+
+.. list-table::
+    :widths: 35 50 15
+    :header-rows: 1
+    :align: left
+
+    * - Flag Name
+      - Description
+      - Default status
+    * - api-session-auth
+      - Enables API authentication via session cookie (JSESSIONID). **Caution: Enabling this feature flag exposes the installation to CSRF risks!** We expect this feature flag to be temporary (only used by frontend developers, see `#9063 <https://github.com/IQSS/dataverse/issues/9063>`_) and for the feature to be removed in the future.
+      - ``Off``
 
+**Note:** Feature flags can be set via any `supported MicroProfile Config API source`_, e.g. the environment variable
+``DATAVERSE_FEATURE_XXX`` (e.g. ``DATAVERSE_FEATURE_API_SESSION_AUTH=1``). These environment variables can be set in your shell before starting Payara. If you are using :doc:`Docker for development </container/dev-usage>`, you can set them in the `docker compose <https://docs.docker.com/compose/environment-variables/set-environment-variables/>`_ file.
 
 .. _:ApplicationServerSettings:
 
@@ -2126,6 +2671,8 @@ This is best done with a system property:
 
 ``./asadmin create-system-properties 'mp.config.profile=ct'``
 
+*Note: the* :doc:`../container/app-image` *uses an (overrideable) environment variable to activate this.*
+
 You might also create your own profiles and use these, please refer to the upstream documentation linked above.
 
 
@@ -2261,22 +2808,28 @@ By default the footer says "Copyright © [YYYY]" but you can add text after the
 :DoiProvider
 ++++++++++++
 
-As of this writing "DataCite" and "EZID" are the only valid options for production installations. Developers using Dataverse Software 4.10+ are welcome to use the keyword "FAKE" to configure a non-production installation with an non-resolving, in-code provider, which will basically short-circuit the DOI publishing process. ``:DoiProvider`` is only needed if you are using DOI.
+As of this writing "DataCite" and "EZID" are the only valid options for production installations. Developers using
+Dataverse Software 4.10+ are welcome to use the keyword "FAKE" to configure a non-production installation with a
+non-resolving, in-code provider, which will basically short-circuit the DOI publishing process. ``:DoiProvider``
+is only needed if you are using DOI.
 
 ``curl -X PUT -d DataCite http://localhost:8080/api/admin/settings/:DoiProvider``
 
-This setting relates to the ``:Protocol``, ``:Authority``, ``:Shoulder``, and ``:IdentifierGenerationStyle`` database settings below as well as the following JVM options:
+This setting relates to the ``:Protocol``, ``:Authority``, ``:Shoulder``, and
+``:IdentifierGenerationStyle`` database settings below as well as the following
+JVM options:
 
-- :ref:`doi.baseurlstring`
-- :ref:`doi.username`
-- :ref:`doi.password`
+- :ref:`dataverse.pid.datacite.mds-api-url`
+- :ref:`dataverse.pid.datacite.rest-api-url`
+- :ref:`dataverse.pid.datacite.username`
+- :ref:`dataverse.pid.datacite.password`
 
 .. _:Protocol:
 
 :Protocol
 +++++++++
 
-As of this writing "doi" and "hdl" are the only valid option for the protocol for a persistent ID.
+As of this writing "doi","hdl", and "perma" are the only valid option for the protocol for a persistent ID.
 
 ``curl -X PUT -d doi http://localhost:8080/api/admin/settings/:Protocol``
 
@@ -2285,9 +2838,9 @@ As of this writing "doi" and "hdl" are the only valid option for the protocol fo
 :Authority
 ++++++++++
 
-Use the authority assigned to you by your DoiProvider or HandleProvider.
+Use the authority assigned to you by your DoiProvider or HandleProvider, or an authority of your choice if you are using PermaLinks.
 
-Please note that the authority cannot have a slash ("/") in it.
+Please note that a DOI or Handle authority cannot have a slash ("/") in it (slash is also not recommended for PermaLink authorities).
 
 ``curl -X PUT -d 10.xxxx http://localhost:8080/api/admin/settings/:Authority``
 
@@ -2296,7 +2849,7 @@ Please note that the authority cannot have a slash ("/") in it.
 :Shoulder
 +++++++++
 
-Out of the box, the DOI shoulder is set to "FK2/" but this is for testing only! When you apply for your DOI namespace, you may have requested a shoulder. The following is only an example and a trailing slash is optional.
+The shoulder is used with DOIs and PermaLinks. Out of the box, the shoulder is set to "FK2/" but this is for testing only! When you apply for your DOI authority/namespace, you may have been assigned a shoulder. The following is only an example and a trailing slash is optional.
 
 ``curl -X PUT -d "MyShoulder/" http://localhost:8080/api/admin/settings/:Shoulder``
 
@@ -2401,13 +2954,34 @@ timestamps.
 :FilePIDsEnabled
 ++++++++++++++++
 
-Toggles publishing of file-based PIDs for the entire installation. By default this setting is absent and Dataverse Software assumes it to be true. If enabled, the registration will be performed asynchronously (in the background) during publishing of a dataset.
+Toggles publishing of file-level PIDs for the entire installation. By default this setting is absent and Dataverse Software assumes it to be false. If enabled, the registration will be performed asynchronously (in the background) during publishing of a dataset.
+
+It is possible to override the installation-wide setting for specific collections, see :ref:`:AllowEnablingFilePIDsPerCollection <:AllowEnablingFilePIDsPerCollection>`. For example, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See :ref:`collection-attributes-api` for details. 
+
+To enable file-level PIDs for the entire installation:
+
+``curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled``
+
 
-If you don't want to register file-based PIDs for your installation, set:
+If you don't want to register file-based PIDs for your entire installation:
 
 ``curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:FilePIDsEnabled``
 
-Note: File-level PID registration was added in Dataverse Software 4.9; it could not be disabled until Dataverse Software 4.9.3.
+.. _:AllowEnablingFilePIDsPerCollection:
+
+:AllowEnablingFilePIDsPerCollection
++++++++++++++++++++++++++++++++++++
+
+Toggles whether superusers can change the File PIDs policy per collection. By default this setting is absent and Dataverse Software assumes it to be false.
+
+For example, if this setting is true, registration of PIDs for files can be enabled in a specific collection when it is disabled instance-wide. Or it can be disabled in specific collections where it is enabled by default. See :ref:`collection-attributes-api` for details. 
+
+To enable setting file-level PIDs per collection:
+
+``curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:AllowEnablingFilePIDsPerCollection``
+
+
+When ``:AllowEnablingFilePIDsPerCollection`` is true, File PIDs can be enabled or disabled for a given collection via the Native API. See :ref:`collection-attributes-api` in the Native API Guide.
 
 .. _:IndependentHandleService:
 
@@ -2519,7 +3093,7 @@ Note: by default, the URL is composed from the settings ``:GuidesBaseUrl`` and `
 :GuidesBaseUrl
 ++++++++++++++
 
-Set ``:GuidesBaseUrl`` to override the default value "http://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful.
+Set ``:GuidesBaseUrl`` to override the default value "https://guides.dataverse.org". If you are interested in writing your own version of the guides, you may find the :doc:`/developers/documentation` section of the Developer Guide helpful.
 
 ``curl -X PUT -d http://dataverse.example.edu http://localhost:8080/api/admin/settings/:GuidesBaseUrl``
 
@@ -2540,14 +3114,14 @@ Set ``:NavbarSupportUrl`` to a fully-qualified URL which will be used for the "S
 
 Note that this will override the default behaviour for the "Support" menu option, which is to display the Dataverse collection 'feedback' dialog.
 
-``curl -X PUT -d http://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl``
+``curl -X PUT -d https://dataverse.example.edu/supportpage.html http://localhost:8080/api/admin/settings/:NavbarSupportUrl``
 
 :MetricsUrl
 +++++++++++
 
 Make the metrics component on the root Dataverse collection a clickable link to a website where you present metrics on your Dataverse installation, perhaps one of the community-supported tools mentioned in the :doc:`/admin/reporting-tools-and-queries` section of the Admin Guide.
 
-``curl -X PUT -d http://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl``
+``curl -X PUT -d https://metrics.dataverse.example.edu http://localhost:8080/api/admin/settings/:MetricsUrl``
 
 .. _:MaxFileUploadSizeInBytes:
 
@@ -2578,6 +3152,8 @@ This setting controls the number of files that can be uploaded through the UI at
 
 ``curl -X PUT -d 500 http://localhost:8080/api/admin/settings/:MultipleUploadFilesLimit``
 
+.. _:ZipDownloadLimit:
+
 :ZipDownloadLimit
 +++++++++++++++++
 
@@ -2603,12 +3179,18 @@ You can override this global setting on a per-format basis for the following for
 - SAV
 - Rdata
 - CSV
-- XLSX
+- XLSX (note that the key for this setting is lower-case ``xlsx``, as in the example below)
+
+For example:
 
-For example, if you want your Dataverse installation to not attempt to ingest Rdata files larger than 1 MB, use this setting:
+* if you want your Dataverse installation to not attempt to ingest Rdata files larger than 1 MB, use this setting:
 
 ``curl -X PUT -d 1000000 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit:Rdata``
 
+* if you want your Dataverse installation to not attempt to ingest XLSX files at all, use this setting:
+
+``curl -X PUT -d 0 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit:xlsx``
+
 :ZipUploadFilesLimit
 ++++++++++++++++++++
 
@@ -2641,6 +3223,21 @@ If ``:SolrFullTextIndexing`` is set to true, the content of files of any size wi
 
 ``curl -X PUT -d 314572800 http://localhost:8080/api/admin/settings/:SolrMaxFileSizeForFullTextIndexing``
 
+
+.. _:DisableSolrFacets:
+
+:DisableSolrFacets
+++++++++++++++++++
+
+Setting this to ``true`` will make the collection ("dataverse") page start showing search results without the usual search facets on the left side of the page. A message will be shown in that column informing the users that facets are temporarily unavailable. Generating the facets is more resource-intensive for Solr than the main search results themselves, so applying this measure will significantly reduce the load on the search engine when its performance becomes an issue.
+
+This setting can be used in combination with the "circuit breaker" mechanism on the Solr side (see the "Installing Solr" section of the Installation Prerequisites guide). An admin can choose to enable it, or even create an automated system for enabling it in response to Solr beginning to drop incoming requests with the HTTP code 503.
+
+To enable the setting::
+
+  curl -X PUT -d true "http://localhost:8080/api/admin/settings/:DisableSolrFacets"
+
+
 .. _:SignUpUrl:
 
 :SignUpUrl
@@ -2747,6 +3344,8 @@ This curl command...
 
 See also :doc:`oauth2`.
 
+.. _:FileFixityChecksumAlgorithm:
+
 :FileFixityChecksumAlgorithm
 ++++++++++++++++++++++++++++
 
@@ -2756,12 +3355,9 @@ The default checksum algorithm used is MD5 and should be sufficient for establis
 
 ``curl -X PUT -d 'SHA-512' http://localhost:8080/api/admin/settings/:FileFixityChecksumAlgorithm``
 
-The fixity algorithm used on existing files can be changed by a superuser using the API. An optional query parameter (num) can be used to limit the number of updates attempted.
-The API call will only update the algorithm and checksum for a file if the existing checksum can be validated against the file.
-Statistics concerning the updates are returned in the response to the API call with details in the log.
+To update the algorithm used for existing files, see :ref:`UpdateChecksums`.
 
-``curl http://localhost:8080/api/admin/updateHashValues/{alg}``
-``curl http://localhost:8080/api/admin/updateHashValues/{alg}?num=1``
+The fixity checksum algorithm in use can be discovered via API. See :ref:`get-fixity-algorithm` in the API Guide.
 
 .. _:PVMinLength:
 
@@ -3041,6 +3637,8 @@ Limit on how many guestbook entries to display on the guestbook-responses page.
 
 ``curl -X PUT -d 10000 http://localhost:8080/api/admin/settings/:GuestbookResponsesPageDisplayLimit``
 
+.. _:CustomDatasetSummaryFields:
+
 :CustomDatasetSummaryFields
 +++++++++++++++++++++++++++
 
@@ -3050,6 +3648,10 @@ You can replace the default dataset metadata fields that are displayed above fil
 
 You have to put the datasetFieldType name attribute in the :CustomDatasetSummaryFields setting for this to work.
 
+The default fields are ``dsDescription,subject,keyword,publication,notesText``.
+
+This setting can be retrieved via API. See :ref:`get-dataset-summary-field-names` in the API Guide.
+
 :AllowApiTokenLookupViaApi
 ++++++++++++++++++++++++++
 
@@ -3078,7 +3680,7 @@ Sets how long a cached metrics result is used before re-running the query for a
 
 Sets the path where the raw Make Data Count logs are stored before being processed. If not set, no logs will be created for Make Data Count. See also the :doc:`/admin/make-data-count` section of the Admin Guide.
 
-``curl -X PUT -d '/usr/local/payara5/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``
+``curl -X PUT -d '/usr/local/payara6/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath``
 
 .. _:DisplayMDCMetrics:
 
@@ -3089,6 +3691,20 @@ Sets the path where the raw Make Data Count logs are stored before being process
 
 ``curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics``
 
+.. _:MDCStartDate:
+
+:MDCStartDate
++++++++++++++
+
+It is possible to display MDC metrics (as of the start date of MDC logging) along with legacy download counts, generated before MDC was enabled.
+This is enabled via the new setting ``:MDCStartDate`` that specifies the cut-over date. If a dataset has any legacy access counts collected prior to that date, those numbers will be displayed in addition to the MDC views and downloads recorded since then.
+(Nominally, this date should be when your installation started logging MDC metrics but it can be any date after that if desired.)
+
+
+``curl -X PUT -d '2019-10-01' http://localhost:8080/api/admin/settings/:MDCStartDate``
+
+
+
 .. _:Languages:
 
 :Languages
@@ -3428,6 +4044,8 @@ For example:
 
 When set to ``true``, this setting allows a superuser to publish and/or update Dataverse collections and datasets bypassing the external validation checks (specified by the settings above). In an event where an external script is reporting validation failures that appear to be in error, this option gives an admin with superuser privileges a quick way to publish the dataset or update a collection for the user. 
 
+.. _:FileCategories:
+
 :FileCategories
 +++++++++++++++
 
@@ -3486,24 +4104,9 @@ The URL of an LDN Inbox to which the LDN Announce workflow step will send messag
 
 The list of parent dataset field names for which the LDN Announce workflow step should send messages. See :doc:`/developers/workflows` for details.
 
-.. _:GlobusBasicToken:
-
-:GlobusBasicToken
-+++++++++++++++++
-
-GlobusBasicToken encodes credentials for Globus integration. See :ref:`globus-support` for details.
-
-:GlobusEndpoint
-+++++++++++++++
-
-GlobusEndpoint is Globus endpoint id used with Globus integration. See :ref:`globus-support` for details.
+.. _:GlobusSettings:
 
-:GlobusStores
-+++++++++++++
-
-A comma-separated list of the S3 stores that are configured to support Globus integration. See :ref:`globus-support` for details.
-
-:GlobusAppURL
+:GlobusAppUrl
 +++++++++++++
 
 The URL where the `dataverse-globus <https://github.com/scholarsportal/dataverse-globus>`_ "transfer" app has been deployed to support Globus integration. See :ref:`globus-support` for details.
@@ -3523,6 +4126,28 @@ A true/false option to add a Globus transfer option to the file download menu wh
 :WebloaderUrl
 +++++++++++++
 
-The URL for main HTML file in https://github.com/gdcc/dvwebloader when that app is deployed. See also :ref:`:UploadMethods` for another required settings.
+The URL of the `DVWebloader <https://github.com/gdcc/dvwebloader>`_ HTML file, used when DVWebloader is enabled in :ref:`:UploadMethods`.
+
+To use the current GDCC version directly:
+
+``curl -X PUT -d 'https://gdcc.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl``
+
+:CategoryOrder
+++++++++++++++
+
+A comma-separated list of category/tag names defining the order in which files with those tags should be displayed.
+The setting can include custom tag names along with the pre-defined tags (Documentation, Data, and Code are the defaults, but the :ref:`:FileCategories` setting can be used to define a different set of tags).
+By default, category ordering is disabled.
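+
+For example, to display files tagged Documentation first, then Data, then Code:
+
+``curl -X PUT -d 'Documentation,Data,Code' http://localhost:8080/api/admin/settings/:CategoryOrder``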
+
+:OrderByFolder
+++++++++++++++
+
+A true (default)/false option determining whether datafiles listed on the dataset page should be grouped by folder.
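+
+For example, to turn folder grouping off:
+
+``curl -X PUT -d false http://localhost:8080/api/admin/settings/:OrderByFolder``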
+
+:AllowUserManagementOfOrder
++++++++++++++++++++++++++++
+
+A true/false (default) option determining whether the dataset datafile table display includes checkboxes enabling users to turn folder ordering and/or category ordering (if an order is defined by ``:CategoryOrder``) on and off dynamically.
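+
+For example, to let users toggle the ordering options themselves:
+
+``curl -X PUT -d true http://localhost:8080/api/admin/settings/:AllowUserManagementOfOrder``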
 
 .. _supported MicroProfile Config API source: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html
+
diff --git a/doc/sphinx-guides/source/installation/installation-main.rst b/doc/sphinx-guides/source/installation/installation-main.rst
index 8559d6ce194..46c1b0b0af3 100755
--- a/doc/sphinx-guides/source/installation/installation-main.rst
+++ b/doc/sphinx-guides/source/installation/installation-main.rst
@@ -28,8 +28,8 @@ Unpack the zip file - this will create the directory ``dvinstall``.
 
 Just make sure the user running the installer has write permission to:
 
-- /usr/local/payara5/glassfish/lib
-- /usr/local/payara5/glassfish/domains/domain1
+- /usr/local/payara6/glassfish/lib
+- /usr/local/payara6/glassfish/domains/domain1
 - the current working directory of the installer (it currently writes its logfile there), and
 - your jvm-option specified files.dir
 
@@ -47,7 +47,7 @@ Follow the instructions in the text file.
 The script will prompt you for some configuration values. If this is a test/evaluation installation, it may be possible to accept the default values provided for most of the settings:
 
 - Internet Address of your host: localhost
-- Payara Directory: /usr/local/payara5
+- Payara Directory: /usr/local/payara6
 - Payara User: current user running the installer script
 - Administrator email address for this Dataverse installation: (none)
 - SMTP (mail) server to relay notification messages: localhost
@@ -98,9 +98,9 @@ The supplied site URL will be saved under the JVM option :ref:`dataverse.siteUrl
 
 **IMPORTANT:** Please note, that "out of the box" the installer will configure the Dataverse installation to leave unrestricted access to the administration APIs from (and only from) localhost. Please consider the security implications of this arrangement (anyone with shell access to the server can potentially mess with your Dataverse installation). An alternative solution would be to block open access to these sensitive API endpoints completely; and to only allow requests supplying a pre-defined "unblock token" (password). If you prefer that as a solution, please consult the supplied script ``post-install-api-block.sh`` for examples on how to set it up. See also "Securing Your Installation" under the :doc:`config` section.
 
-The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara5/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara.
+The Dataverse Software uses JHOVE_ to help identify the file format (CSV, PNG, etc.) for files that users have uploaded. The installer places files called ``jhove.conf`` and ``jhoveConfig.xsd`` into the directory ``/usr/local/payara6/glassfish/domains/domain1/config`` by default and makes adjustments to the jhove.conf file based on the directory into which you chose to install Payara.
 
-.. _JHOVE: http://jhove.openpreservation.org
+.. _JHOVE: https://jhove.openpreservation.org
 
 Logging In
 ----------
@@ -120,7 +120,7 @@ Use the following credentials to log in:
 - username: dataverseAdmin
 - password: admin
 
-Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg <https://twitter.com/dataverseorg>`_ asking to be added to the map at http://dataverse.org :)
+Congratulations! You have a working Dataverse installation. Soon you'll be tweeting at `@dataverseorg <https://twitter.com/dataverseorg>`_ asking to be added to the map at https://dataverse.org :)
 
 Trouble? See if you find an answer in the troubleshooting section below.
 
@@ -204,7 +204,7 @@ Be sure you save the changes made here and then restart your Payara server to te
 UnknownHostException While Deploying
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also http://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873
+If you are seeing "Caused by: java.net.UnknownHostException: myhost: Name or service not known" in server.log and your hostname is "myhost" the problem is likely that "myhost" doesn't appear in ``/etc/hosts``. See also https://stackoverflow.com/questions/21817809/glassfish-exception-during-deployment-project-with-stateful-ejb/21850873#21850873
 
 .. _fresh-reinstall:
 
@@ -249,7 +249,7 @@ Deleting Uploaded Files
 
 The path below will depend on the value for ``dataverse.files.directory`` as described in the :doc:`config` section:
 
-``rm -rf /usr/local/payara5/glassfish/domains/domain1/files``
+``rm -rf /usr/local/payara6/glassfish/domains/domain1/files``
 
 Rerun Installer
 ^^^^^^^^^^^^^^^
diff --git a/doc/sphinx-guides/source/installation/intro.rst b/doc/sphinx-guides/source/installation/intro.rst
index 2251af7b81b..6d77a1209b2 100644
--- a/doc/sphinx-guides/source/installation/intro.rst
+++ b/doc/sphinx-guides/source/installation/intro.rst
@@ -2,7 +2,7 @@
 Introduction
 ============
 
-Welcome! Thanks for installing `The Dataverse Project <http://dataverse.org>`_!
+Welcome! Thanks for installing `The Dataverse Project <https://dataverse.org>`_!
 
 .. contents:: |toctitle|
 	:local:
@@ -36,7 +36,7 @@ Getting Help
 To get help installing or configuring a Dataverse installation, please try one or more of:
 
 - posting to the `dataverse-community <https://groups.google.com/forum/#!forum/dataverse-community>`_ Google Group.
-- asking at http://chat.dataverse.org
+- asking at https://chat.dataverse.org
 - emailing support@dataverse.org to open a private ticket at https://help.hmdc.harvard.edu
 
 Information to Send to Support When Installation Fails
@@ -48,7 +48,7 @@ If you've encountered a problem installing Dataverse and are ready to ask for he
 - Operating system (usually a Linux distribution) and version.
 - Output from the installer (STDOUT, STDERR).
 - The ``scripts/api/setup-all.*.log`` files left behind by the installer.
-- The ``server.log`` file from Payara (by default at ``/usr/local/payara5/glassfish/domains/domain1/logs/server.log``).
+- The ``server.log`` file from Payara (by default at ``/usr/local/payara6/glassfish/domains/domain1/logs/server.log``).
 
 Improving this Guide
 --------------------
diff --git a/doc/sphinx-guides/source/installation/oauth2.rst b/doc/sphinx-guides/source/installation/oauth2.rst
index 8dffde87cc2..7a0e938b572 100644
--- a/doc/sphinx-guides/source/installation/oauth2.rst
+++ b/doc/sphinx-guides/source/installation/oauth2.rst
@@ -11,7 +11,7 @@ As explained under "Auth Modes" in the :doc:`config` section, OAuth2 is one of t
 
 `OAuth2 <https://oauth.net/2/>`_ is an authentication protocol that allows systems to share user data, while letting the users control what data is being shared. When you see buttons stating "login with Google" or "login through Facebook", OAuth2 is probably involved. For the purposes of this section, we will shorten "OAuth2" to just "OAuth." OAuth can be compared and contrasted with :doc:`shibboleth`.
 
-The Dataverse Software supports four OAuth providers: `ORCID <http://orcid.org>`_, `Microsoft Azure Active Directory (AD) <https://docs.microsoft.com/azure/active-directory/>`_, `GitHub <https://github.com>`_, and `Google <https://console.developers.google.com>`_.
+The Dataverse Software supports four OAuth providers: `ORCID <https://orcid.org>`_, `Microsoft Azure Active Directory (AD) <https://docs.microsoft.com/azure/active-directory/>`_, `GitHub <https://github.com>`_, and `Google <https://console.developers.google.com>`_.
 
 In addition :doc:`oidc` are supported, using a standard based on OAuth2.
 
diff --git a/doc/sphinx-guides/source/installation/oidc.rst b/doc/sphinx-guides/source/installation/oidc.rst
index a40ef758dc7..d132fd2953d 100644
--- a/doc/sphinx-guides/source/installation/oidc.rst
+++ b/doc/sphinx-guides/source/installation/oidc.rst
@@ -16,7 +16,7 @@ Being a standard, you can easily enable the use of any OpenID connect compliant
 Some prominent provider examples:
 
 - `Google <https://developers.google.com/identity/protocols/OpenIDConnect>`_
-- `Microsoft Azure AD <https://docs.microsoft.com/de-de/azure/active-directory/develop/v2-protocols-oidc>`_
+- `Microsoft Azure AD <https://learn.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc>`_
 - `Yahoo <https://developer.yahoo.com/oauth2/guide/openid_connect>`_
 - ORCID `announced support <https://orcid.org/blog/2019/04/17/orcid-openid-connect-and-implicit-authentication>`_
 
@@ -26,7 +26,7 @@ You can also either host an OpenID Connect identity management on your own or us
 - `Keycloak <https://www.keycloak.org>`_ is an open source solution for an IDM/IAM
 - `Unity IDM <https://www.unity-idm.eu>`_ is another open source IDM/IAM solution
 
-Other use cases and combinations
+Other Use Cases and Combinations
 --------------------------------
 
 - Using your custom identity management solution might be a workaround when you seek for LDAP support, but
@@ -41,7 +41,7 @@ Other use cases and combinations
 - In the future, OpenID Connect might become a successor to the large scale R&E SAML federations we have nowadays.
   See also `OpenID Connect Federation Standard <https://openid.net/specs/openid-connect-federation-1_0.html>`_ (in development)
 
-How to use
+How to Use
 ----------
 
 Just like with :doc:`oauth2` you need to obtain a *Client ID* and a *Client Secret* from your provider(s).
@@ -51,7 +51,7 @@ Just like with :doc:`oauth2` you need to obtain a *Client ID* and a *Client Secr
   You need to apply for credentials out-of-band.
 
 The Dataverse installation will discover all necessary metadata for a given provider on its own (this is `part of the standard
-<http://openid.net/specs/openid-connect-discovery-1_0.html>`_).
+<https://openid.net/specs/openid-connect-discovery-1_0.html>`_).
 
 To enable this, you need to specify an *Issuer URL* when creating the configuration for your provider (see below).
 
@@ -59,18 +59,61 @@ Finding the issuer URL is best done by searching for terms like "discovery" in t
 The discovery document is always located at ``<issuer url>/.well-known/openid-configuration`` (standardized).
 To be sure, you can always lookup the ``issuer`` value inside the live JSON-based discovery document.
 
-Please create a my-oidc-provider.json file like this, replacing every ``<...>`` with your values:
+Note that if you work with Keycloak, make sure the base URL is in the following format: ``https://host:port/realms/{realm}``
+where ``{realm}`` has to be replaced by the name of the Keycloak realm.
 
-.. code-block:: json
+After adding a provider, the Log In page will by default show the "builtin" provider, but you can adjust this via the
+``:DefaultAuthProvider`` configuration option. For details, see :doc:`config`.
 
-    {
-        "id":"<a unique id>",
-        "factoryAlias":"oidc",
-        "title":"<a title - shown in UI>",
-        "subtitle":"<a subtitle - currently unused in UI>",
-        "factoryData":"type: oidc | issuer: <issuer url> | clientId: <client id> | clientSecret: <client secret>",
-        "enabled":true
-    }
+.. hint::
+   In contrast to our :doc:`oauth2`, you can use multiple providers by creating distinct configurations enabled by
+   the same technology and without modifying the Dataverse Software code base (standards for the win!).
+
+
+.. _oidc-pkce:
+
+Enabling PKCE Security
+^^^^^^^^^^^^^^^^^^^^^^
+
+Many providers these days support or even require the usage of `PKCE <https://oauth.net/2/pkce/>`_ to safeguard against
+some attacks and enable public clients that cannot have a secure secret to still use OpenID Connect (or OAuth2).
+
+The Dataverse-built OIDC client can be configured to use PKCE and the method to use when creating the code challenge can be specified.
+See also `this explanation of the flow <https://auth0.com/docs/get-started/authentication-and-authorization-flow/authorization-code-flow-with-proof-key-for-code-exchange-pkce>`_
+for details on how this works.
+
+As we are using the `Nimbus SDK <https://connect2id.com/products/nimbus-oauth-openid-connect-sdk>`_ as our client
+library, we support the standard ``PLAIN`` and ``S256`` (SHA-256) code challenge methods. The SHA-256 method is the default,
+as recommended in `RFC 7636 <https://datatracker.ietf.org/doc/html/rfc7636#section-4.2>`_. If your provider needs some
+other method, please open an issue.
+
+The examples in the provisioning sections below include the parameters you may use to configure PKCE.
+
+Provision a Provider
+--------------------
+
+Depending on your use case, you can choose different ways to set up one or multiple OIDC identity providers.
+
+Using :ref:`jvm-options` has the advantage of being consistent and does not require additional calls to the API.
+It can only configure one provider, though; you can still mix it with other provider definitions added via the API.
+
+Using the REST API has the advantage of provisioning multiple, different OIDC providers.
+Depending on your use case, it has the drawback of needing additional API calls.
+
+If you only need a single provider in your installation and it uses OIDC, use the JVM options, as this
+requires fewer extra steps and allows you to keep more of the configuration in a single source.
+
+Provision via REST API
+^^^^^^^^^^^^^^^^^^^^^^
+
+Note: you may omit the PKCE related settings from ``factoryData`` below if you don't plan on using PKCE - default is
+disabled.
+
+Please create a :download:`my-oidc-provider.json <../_static/installation/files/root/auth-providers/oidc.json>` file, replacing every ``<...>`` with your values:
+
+.. literalinclude:: /_static/installation/files/root/auth-providers/oidc.json
+    :name: oidc-provider-example
+    :language: json
 
 Now load the configuration into your Dataverse installation using the same API as with :doc:`oauth2`:
 
@@ -80,10 +123,68 @@ The Dataverse installation will automatically try to load the provider and retri
 You should see the new provider under "Other options" on the Log In page, as described in the :doc:`/user/account`
 section of the User Guide.
 
-By default, the Log In page will show the "builtin" provider, but you can adjust this via the ``:DefaultAuthProvider``
-configuration option. For details, see :doc:`config`.
-
-.. hint::
-   In contrast to our :doc:`oauth2`, you can use multiple providers by creating distinct configurations enabled by
-   the same technology and without modifying the Dataverse Software code base (standards for the win!).
-
+.. _oidc-mpconfig:
+
+Provision via JVM Options
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A single provider may be provisioned using :ref:`jvm-options`.
+It may be accompanied by more providers configured via REST API.
+Note that this provider will only be deployed at startup time and (currently) cannot be reconfigured without a restart.
+
+All options below may be set via *MicroProfile Config API* sources. Examples: use environment variable
+``DATAVERSE_AUTH_OIDC_ENABLED`` for the ``dataverse.auth.oidc.enabled`` option or ``DATAVERSE_AUTH_OIDC_CLIENT_ID``
+for the ``dataverse.auth.oidc.client-id`` option.
+
+The following options are available:
+
+.. list-table::
+  :widths: 25 55 10 10
+  :header-rows: 1
+  :align: left
+
+  * - Option
+    - Description
+    - Mandatory
+    - Default
+  * - ``dataverse.auth.oidc.enabled``
+    - Enable or disable provisioning the provider via MicroProfile.
+    - N
+    - ``false``
+  * - ``dataverse.auth.oidc.client-id``
+    - The client-id of the application to identify it at your provider.
+    - Y
+    - \-
+  * - ``dataverse.auth.oidc.client-secret``
+    - A confidential secret to authorize application requests to the provider as legitimate.
+    - N
+    - \-
+  * - ``dataverse.auth.oidc.auth-server-url``
+    - The base URL of the OpenID Connect (OIDC) server as explained above.
+    - Y
+    - \-
+  * - ``dataverse.auth.oidc.pkce.enabled``
+    - Set to ``true`` to enable :ref:`PKCE <oidc-pkce>` in auth flow.
+    - N
+    - ``false``
+  * - ``dataverse.auth.oidc.pkce.method``
+    - Set code challenge method. The default value is the current best practice in the literature.
+    - N
+    - ``S256``
+  * - ``dataverse.auth.oidc.title``
+    - The UI visible name for this provider in login options.
+    - N
+    - ``OpenID Connect``
+  * - ``dataverse.auth.oidc.subtitle``
+    - A subtitle, currently not displayed by the UI.
+    - N
+    - ``OpenID Connect``
+  * - ``dataverse.auth.oidc.pkce.max-cache-size``
+    - Tune the maximum size of all OIDC providers' verifier cache (the number of outstanding PKCE-enabled auth responses).
+    - N
+    - 10000
+  * - ``dataverse.auth.oidc.pkce.max-cache-age``
+    - Tune the maximum age, in seconds, of all OIDC providers' verifier cache entries. The default is 5 minutes, equivalent to the lifetime
+      of many OIDC access tokens.
+    - N
+    - 300
\ No newline at end of file
diff --git a/doc/sphinx-guides/source/installation/prep.rst b/doc/sphinx-guides/source/installation/prep.rst
index c491659cd56..abb4349d3ad 100644
--- a/doc/sphinx-guides/source/installation/prep.rst
+++ b/doc/sphinx-guides/source/installation/prep.rst
@@ -79,15 +79,24 @@ System Requirements
 Hardware Requirements
 +++++++++++++++++++++
 
-A basic Dataverse installation runs fine on modest hardware. For example, as of this writing the test installation at http://phoenix.dataverse.org is backed by a single virtual machine with two 2.8 GHz processors, 8 GB of RAM and 50 GB of disk.
+A basic Dataverse installation runs fine on modest hardware. For example, in the recent past we had a test instance backed by a single virtual machine with two 2.8 GHz processors, 8 GB of RAM and 50 GB of disk.
 
 In contrast, before we moved it to the Amazon Cloud, the production installation at https://dataverse.harvard.edu was backed by six servers with two Intel Xeon 2.53 Ghz CPUs and either 48 or 64 GB of RAM. The three servers with 48 GB of RAM run were web frontends running Glassfish 4 and Apache and were load balanced by a hardware device. The remaining three servers with 64 GB of RAM were the primary and backup database servers and a server dedicated to running Rserve. Multiple TB of storage were mounted from a SAN via NFS.
 
-Currently, the Harvard Dataverse Repository is served by four AWS server nodes: two "m4.4xlarge" instances (64GB/16 vCPU) as web frontends, one 32GB/8 vCPU ("m4.2xlarge") instance for the Solr search engine, and one 16GB/4 vCPU ("m4.xlarge") instance for R. The PostgreSQL database is served by Amazon RDS, and physical files are stored on Amazon S3.
+Currently, the Harvard Dataverse Repository is served by four AWS server nodes:
 
-The Dataverse Software installation script will attempt to give your app server the right amount of RAM based on your system.
+- two instances for web frontends running Payara fronted by Apache ("m4.4xlarge" with 64 GB RAM and 16 vCPUs)
 
-Experimentation and testing with various hardware configurations is encouraged, or course, but do reach out as explained in the :doc:`intro` as needed for assistance.
+  - these are sitting behind an AWS ELB load balancer
+
+- one instance for the Solr search engine ("m4.2xlarge" with 32 GB RAM and 8 vCPUs)
+- one instance for R ("m4.xlarge" with 16 GB RAM and 4 vCPUs)
+
+The PostgreSQL database is served by Amazon RDS.
+
+Physical files are stored on Amazon S3. The primary bucket is replicated in real-time to a secondary bucket, which is backed up to Glacier. Deleted files are kept around on the secondary bucket for a little while for convenient recovery. In addition, we use a backup script mentioned under :doc:`/admin/backups`.
+
+Experimentation and testing with various hardware configurations is encouraged, of course. Note that the installation script will attempt to give your app server (the web frontend) the right amount of RAM based on your system.
 
 Software Requirements
 +++++++++++++++++++++
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index 59de507a264..a56f4811ace 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -19,22 +19,22 @@ We assume you plan to run your Dataverse installation on Linux and we recommend
 Java
 ----
 
-The Dataverse Software requires Java SE 11 (or higher).
+The Dataverse Software requires Java SE 17 (or higher).
 
 Installing Java
 ===============
 
 The Dataverse Software should run fine with only the Java Runtime Environment (JRE) installed, but installing the Java Development Kit (JDK) is recommended so that useful tools for troubleshooting production environments are available. We recommend using Oracle JDK or OpenJDK.
 
-The Oracle JDK can be downloaded from http://www.oracle.com/technetwork/java/javase/downloads/index.html
+The Oracle JDK can be downloaded from https://www.oracle.com/technetwork/java/javase/downloads/index.html
 
 On a RHEL/derivative, install OpenJDK (devel version) using yum::
 
-	# sudo yum install java-11-openjdk
+	# sudo yum install java-17-openjdk
 
-If you have multiple versions of Java installed, Java 11 should be the default when ``java`` is invoked from the command line. You can test this by running ``java -version``.
+If you have multiple versions of Java installed, Java 17 should be the default when ``java`` is invoked from the command line. You can test this by running ``java -version``.
 
-On RHEL/derivative you can make Java 11 the default with the ``alternatives`` command, having it prompt you to select the version of Java from a list::
+On RHEL/derivative you can make Java 17 the default with the ``alternatives`` command, having it prompt you to select the version of Java from a list::
 
         # alternatives --config java
 
@@ -44,7 +44,7 @@ On RHEL/derivative you can make Java 11 the default with the ``alternatives`` co
 Payara
 ------
 
-Payara 5.2022.3 is recommended. Newer versions might work fine, regular updates are recommended.
+Payara 6.2023.8 is recommended. Newer versions might work fine. Regular updates are recommended.
 
 Installing Payara
 =================
@@ -53,25 +53,27 @@ Installing Payara
 
 	# useradd dataverse
 
-- Download and install Payara (installed in ``/usr/local/payara5`` in the example commands below)::
+- Download and install Payara (installed in ``/usr/local/payara6`` in the example commands below)::
 
-	# wget https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip
-	# unzip payara-5.2022.3.zip
-	# mv payara5 /usr/local
+	# wget https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip
+	# unzip payara-6.2023.8.zip
+	# mv payara6 /usr/local
+
+If nexus.payara.fish is ever down for maintenance, Payara distributions are also available from https://repo1.maven.org/maven2/fish/payara/distributions/payara/
 
 If you intend to install and run Payara under a service account (and we hope you do), chown -R the Payara hierarchy to root to protect it but give the service account access to the below directories:
 
 - Set service account permissions::
 
-	# chown -R root:root /usr/local/payara5
-	# chown dataverse /usr/local/payara5/glassfish/lib
-	# chown -R dataverse:dataverse /usr/local/payara5/glassfish/domains/domain1
+	# chown -R root:root /usr/local/payara6
+	# chown dataverse /usr/local/payara6/glassfish/lib
+	# chown -R dataverse:dataverse /usr/local/payara6/glassfish/domains/domain1
 
 After installation, you may chown the lib/ directory back to root; the installer only needs write access to copy the JDBC driver into that directory.
 
 - Change from ``-client`` to ``-server`` under ``<jvm-options>-client</jvm-options>``::
 
-	# vim /usr/local/payara5/glassfish/domains/domain1/config/domain.xml
+	# vim /usr/local/payara6/glassfish/domains/domain1/config/domain.xml
 
 This recommendation comes from http://www.c2b2.co.uk/middleware-blog/glassfish-4-performance-tuning-monitoring-and-troubleshooting.php among other places.
 
@@ -95,10 +97,14 @@ Also note that Payara may utilize more than the default number of file descripto
 PostgreSQL
 ----------
 
+PostgreSQL 13 is recommended because it's the version we test against. Version 10 or higher is required because that's what's `supported by Flyway <https://documentation.red-gate.com/fd/postgresql-184127604.html>`_, which we use for database migrations.
+
+You are welcome to experiment with newer versions of PostgreSQL, but please note that as of PostgreSQL 15, permissions have been restricted on the ``public`` schema (`release notes <https://www.postgresql.org/docs/release/15.0/>`_, `EDB blog post <https://www.enterprisedb.com/blog/new-public-schema-permissions-postgresql-15>`_, `Crunchy Data blog post <https://www.crunchydata.com/blog/be-ready-public-schema-changes-in-postgres-15>`_). The Dataverse installer has been updated to restore the old permissions, but this may not be a long term solution.
+
 Installing PostgreSQL
 =====================
 
-The application has been tested with PostgreSQL versions up to 13 and version 10+ is required. We recommend installing the latest version that is available for your OS distribution. *For example*, to install PostgreSQL 13 under RHEL7/derivative::
+*For example*, to install PostgreSQL 13 under RHEL7/derivative::
 
 	# yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm
 	# yum makecache fast
@@ -152,12 +158,12 @@ Configuring Database Access for the Dataverse Installation (and the Dataverse So
 Solr
 ----
 
-The Dataverse Software search index is powered by Solr.
+The Dataverse software search index is powered by Solr.
 
 Supported Versions
 ==================
 
-The Dataverse Software has been tested with Solr version 8.11.1. Future releases in the 8.x series are likely to be compatible; however, this cannot be confirmed until they are officially tested. Major releases above 8.x (e.g. 9.x) are not supported.
+The Dataverse software has been tested with Solr version 9.3.0. Future releases in the 9.x series are likely to be compatible. Please get in touch (:ref:`support`) if you are having trouble with a newer version.
 
 Installing Solr
 ===============
@@ -172,19 +178,19 @@ Become the ``solr`` user and then download and configure Solr::
 
         su - solr
         cd /usr/local/solr
-        wget https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz
-        tar xvzf solr-8.11.1.tgz
-        cd solr-8.11.1
+        wget https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz
+        tar xvzf solr-9.3.0.tgz
+        cd solr-9.3.0
         cp -r server/solr/configsets/_default server/solr/collection1
 
 You should already have a "dvinstall.zip" file that you downloaded from https://github.com/IQSS/dataverse/releases . Unzip it into ``/tmp``. Then copy the files into place::
 
-        cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf
-        cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-8.11.1/server/solr/collection1/conf
+        cp /tmp/dvinstall/schema*.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf
+        cp /tmp/dvinstall/solrconfig.xml /usr/local/solr/solr-9.3.0/server/solr/collection1/conf
 
 Note: The Dataverse Project team has customized Solr to boost results that come from certain indexed elements inside the Dataverse installation, for example prioritizing results from Dataverse collections over Datasets. If you would like to remove this, edit your ``solrconfig.xml`` and remove the ``<str name="qf">`` element and its contents. If you have ideas about how this boosting could be improved, feel free to contact us through our Google Group https://groups.google.com/forum/#!forum/dataverse-dev .
 
-A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-8.11.1/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400``
+A Dataverse installation requires a change to the ``jetty.xml`` file that ships with Solr. Edit ``/usr/local/solr/solr-9.3.0/server/etc/jetty.xml`` , increasing ``requestHeaderSize`` from ``8192`` to ``102400``
 
 Solr will warn about needing to increase the number of file descriptors and max processes in a production environment but will still run with defaults. We have increased these values to the recommended levels by adding ulimit -n 65000 to the init script, and the following to ``/etc/security/limits.conf``::
 
@@ -203,7 +209,26 @@ Solr launches asynchronously and attempts to use the ``lsof`` binary to watch fo
 
 Finally, you need to tell Solr to create the core "collection1" on startup::
 
-        echo "name=collection1" > /usr/local/solr/solr-8.11.1/server/solr/collection1/core.properties
+        echo "name=collection1" > /usr/local/solr/solr-9.3.0/server/solr/collection1/core.properties
+
+Dataverse collection ("dataverse") page uses Solr very heavily. On a busy instance this may cause the search engine to become the performance bottleneck, making these pages take increasingly longer to load, potentially affecting the overall performance of the application and/or causing Solr itself to crash. If this is observed on your instance, we recommend uncommenting the following lines in the ``<circuitBreaker ...>`` section of the ``solrconfig.xml`` file::
+
+  <str name="memEnabled">true</str>
+  <str name="memThreshold">75</str>
+
+and::
+
+  <str name="cpuEnabled">true</str>
+  <str name="cpuThreshold">75</str>
+
+This activates Solr's "circuit breaker" mechanisms, which make it drop incoming requests with the HTTP code 503 when it starts experiencing load issues. As of Dataverse 6.1, the collection page will recognize this condition and display a customizable message informing users that the search engine is unavailable because of heavy load, on the assumption that the condition is transient, and suggesting that they try again later. This is still an inconvenience to users, but it handles the problem more gracefully than letting pages time out or crash. You may need to experiment with and adjust the threshold values defined in the lines above.
+
+If this becomes a common issue, another temporary workaround an admin may choose is to enable the following setting::
+
+  curl -X PUT -d true "http://localhost:8080/api/admin/settings/:DisableSolrFacets"
+
+This makes the collection page show the search results without the usual search facets on the left side of the page. Another customizable message is shown in that column, informing users that facets are temporarily unavailable. Generating these facets is more resource-intensive for Solr than the main search results themselves, so applying this measure significantly reduces the load on the search engine.
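+
+Once the load issues subside, the setting can be removed again via the same admin settings API::
+
+  curl -X DELETE "http://localhost:8080/api/admin/settings/:DisableSolrFacets"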
+
 
 Solr Init Script
 ================
@@ -227,11 +252,9 @@ For systems using init.d (like CentOS 6), download this :download:`Solr init scr
 Securing Solr
 =============
 
-Our sample init script and systemd service file linked above tell Solr to only listen on localhost (127.0.0.1). We strongly recommend that you also use a firewall to block access to the Solr port (8983) from outside networks, for added redundancy.
-
-It is **very important** not to allow direct access to the Solr API from outside networks! Otherwise, any host that can reach the Solr port (8983 by default) can add or delete data, search unpublished data, and even reconfigure Solr. For more information, please see https://lucene.apache.org/solr/guide/7_3/securing-solr.html. A particularly serious security issue that has been identified recently allows a potential intruder to remotely execute arbitrary code on the system. See `RCE in Solr via Velocity Template <https://github.com/veracode-research/solr-injection#7-cve-2019-xxxx-rce-via-velocity-template-by-_s00py>`_ for more information.
+As of version 9.3.0, Solr listens solely on localhost for security reasons. If your installation will run Solr on its own host, you will need to edit ``bin/solr.in.sh``, setting ``JETTY_HOST`` to the external IP address of your Solr server to tell Solr to accept external connections.
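+
+For example, the relevant line in ``bin/solr.in.sh`` could look like the following (a sketch: in the ``solr.in.sh`` shipped with Solr 9.3 the variable appears as ``SOLR_JETTY_HOST``, and the address below is a placeholder for your server's external IP)::
+
+  SOLR_JETTY_HOST="203.0.113.10"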
 
-If you're running your Dataverse installation across multiple service hosts you'll want to remove the jetty.host argument (``-j jetty.host=127.0.0.1``) from the startup command line, but make sure Solr is behind a firewall and only accessible by the Dataverse installation host(s), by specific ip address(es).
+We strongly recommend that you also use a firewall to block access to the Solr port (8983) from outside networks. It is **very important** not to allow direct access to the Solr API from outside networks! Otherwise, any host that can reach Solr can add or delete data, search unpublished data, and even reconfigure Solr. For more information, please see https://solr.apache.org/guide/solr/latest/deployment-guide/securing-solr.html
 
 We additionally recommend that the Solr service account's shell be disabled, as it isn't necessary for daily operation::
 
@@ -261,7 +284,7 @@ Installing jq
 or you may install it manually::
 
         # cd /usr/bin
-        # wget http://stedolan.github.io/jq/download/linux64/jq
+        # wget https://stedolan.github.io/jq/download/linux64/jq
         # chmod +x jq
         # jq --version
 
diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst
index cd0fbda77a6..3a2e1b99c70 100644
--- a/doc/sphinx-guides/source/installation/shibboleth.rst
+++ b/doc/sphinx-guides/source/installation/shibboleth.rst
@@ -76,7 +76,7 @@ A ``jk-connector`` network listener should have already been set up when you ran
 
 You can verify this with ``./asadmin list-network-listeners``. 
 
-This enables the `AJP protocol <http://en.wikipedia.org/wiki/Apache_JServ_Protocol>`_ used in Apache configuration files below.
+This enables the `AJP protocol <https://en.wikipedia.org/wiki/Apache_JServ_Protocol>`_ used in Apache configuration files below.
 
 SSLEngine Warning Workaround
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -93,7 +93,7 @@ Configure Apache
 Enforce HTTPS
 ~~~~~~~~~~~~~
 
-To prevent attacks such as `FireSheep <http://en.wikipedia.org/wiki/Firesheep>`_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste that those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf`` but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place.
+To prevent attacks such as `FireSheep <https://en.wikipedia.org/wiki/Firesheep>`_, HTTPS should be enforced. https://wiki.apache.org/httpd/RewriteHTTPToHTTPS provides a good method. You **could** copy and paste those "rewrite rule" lines into Apache's main config file at ``/etc/httpd/conf/httpd.conf``, but using Apache's "virtual hosts" feature is recommended so that you can leave the main configuration file alone and drop a host-specific file into place.
 
 Below is an example of how "rewrite rule" lines look within a ``VirtualHost`` block. Download a :download:`sample file <../_static/installation/files/etc/httpd/conf.d/dataverse.example.edu.conf>` , edit it to substitute your own hostname under ``ServerName``, and place it at ``/etc/httpd/conf.d/dataverse.example.edu.conf`` or a filename that matches your hostname. The file must be in ``/etc/httpd/conf.d`` and must end in ".conf" to be included in Apache's configuration.
 
@@ -235,7 +235,7 @@ Run semodule
 
 Silent is golden. No output is expected. This will place a file in ``/etc/selinux/targeted/modules/active/modules/shibboleth.pp`` and include "shibboleth" in the output of ``semodule -l``. See the ``semodule`` man page if you ever want to remove or disable the module you just added.
 
-Congrats! You've made the creator of http://stopdisablingselinux.com proud. :)
+Congrats! You've made the creator of https://stopdisablingselinux.com proud. :)
 
 Restart Apache and Shibboleth
 -----------------------------
diff --git a/doc/sphinx-guides/source/style/foundations.rst b/doc/sphinx-guides/source/style/foundations.rst
index 31e0c314a05..cc193666868 100755
--- a/doc/sphinx-guides/source/style/foundations.rst
+++ b/doc/sphinx-guides/source/style/foundations.rst
@@ -9,7 +9,7 @@ Foundation elements are the very basic building blocks to create a page in Datav
 Grid Layout
 ===========
 
-`Bootstrap <http://getbootstrap.com/css/#grid>`__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts.
+`Bootstrap <https://getbootstrap.com/css/#grid>`__ provides a responsive, fluid, 12-column grid system that we use to organize our page layouts.
 
 We use the fixed-width ``.container`` class which provides responsive widths (i.e. auto, 750px, 970px or 1170px) based on media queries for the page layout, with a series of rows and columns for the content.
 
@@ -42,7 +42,7 @@ The grid layout uses ``.col-sm-*`` classes for horizontal groups of columns, ins
 Typography
 ==========
 
-The typeface, text size, and line-height are set in the `Bootstrap CSS <http://getbootstrap.com/css/#type>`__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ``<body>`` and all paragraphs.
+The typeface, text size, and line-height are set in the `Bootstrap CSS <https://getbootstrap.com/css/#type>`__. We use Bootstrap's global default ``font-size`` of **14px**, with a ``line-height`` of **1.428**, which is applied to the ``<body>`` and all paragraphs.
 
 .. code-block:: css
 
@@ -57,7 +57,7 @@ The typeface, text size, and line-height are set in the `Bootstrap CSS <http://g
 Color Palette
 =============
 
-The default color palette is set in the `Bootstrap CSS <http://getbootstrap.com/css/#less-variables-colors>`__. It provides the background, border, text and link colors used across the application.
+The default color palette is set in the `Bootstrap CSS <https://getbootstrap.com/css/#less-variables-colors>`__. It provides the background, border, text and link colors used across the application.
 
 
 Brand Colors
@@ -138,7 +138,7 @@ We use our brand color, a custom burnt orange ``{color:#C55B28;}``, which is set
 Text Colors
 -----------
 
-Text color is the default setting from `Bootstrap CSS <http://getbootstrap.com/css/#less-variables-scaffolding>`__.
+Text color is the default setting from `Bootstrap CSS <https://getbootstrap.com/css/#less-variables-scaffolding>`__.
 
 .. code-block:: css
 
@@ -163,7 +163,7 @@ Text color is the default setting from `Bootstrap CSS <http://getbootstrap.com/c
 Link Colors
 -----------
 
-Link color is the default setting from `Bootstrap CSS <http://getbootstrap.com/css/#less-variables-links>`__. The hover state color is set to 15% darker.
+Link color is the default setting from `Bootstrap CSS <https://getbootstrap.com/css/#less-variables-links>`__. The hover state color is set to 15% darker.
 
 **Please note**, there is a CSS override issue with the link color due to the use of both a Bootstrap stylesheet and a PrimeFaces stylesheet in the UI. We've added CSS such as ``.ui-widget-content a {color: #428BCA;}`` to our stylesheet to keep the link color consistent.
 
@@ -204,7 +204,7 @@ Link color is the default setting from `Bootstrap CSS <http://getbootstrap.com/c
 Contextual Classes
 ------------------
 
-Contextual classes from `Bootstrap CSS <http://getbootstrap.com/css/#helper-classes>`__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes.
+Contextual classes from `Bootstrap CSS <https://getbootstrap.com/css/#helper-classes>`__ can be used to style background and text colors. Semantic colors include various colors assigned to meaningful contextual values. We convey meaning through color with a handful of emphasis utility classes.
 
 .. raw:: html
 
@@ -259,7 +259,7 @@ We use various icons across the application, which we get from Bootstrap, FontCu
 Bootstrap Glyphicons
 --------------------
 
-There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap <http://getbootstrap.com/components/#glyphicons>`__. We utilize these mainly as icons inside of buttons and in message blocks.
+There are over 250 glyphs in font format from the Glyphicon Halflings set provided by `Bootstrap <https://getbootstrap.com/components/#glyphicons>`__. We utilize these mainly as icons inside of buttons and in message blocks.
 
 .. raw:: html
 
@@ -305,7 +305,7 @@ The :doc:`/developers/fontcustom` section of the Developer Guide explains how to
 Socicon Icon Font 
 -----------------
 
-We use `Socicon <http://www.socicon.com>`__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels.
+We use `Socicon <https://www.socicon.com>`__ for our custom social icons. In the footer we use icons for Twitter and Github. In our Share feature, we also use custom social icons to allow users to select from a list of social media channels.
 
 .. raw:: html
 
diff --git a/doc/sphinx-guides/source/style/patterns.rst b/doc/sphinx-guides/source/style/patterns.rst
index e96f17dc2ec..c6602ffa26e 100644
--- a/doc/sphinx-guides/source/style/patterns.rst
+++ b/doc/sphinx-guides/source/style/patterns.rst
@@ -1,7 +1,7 @@
 Patterns
 ++++++++
 
-Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap <http://getbootstrap.com/components/>`__ like tooltips and dropdowns, and AJAX components from `PrimeFaces <https://www.primefaces.org/showcase/>`__ like datatables and commandlinks.
+Patterns are what emerge when using the foundation elements together with basic objects like buttons and alerts, more complex Javascript components from `Bootstrap <https://getbootstrap.com/components/>`__ like tooltips and dropdowns, and AJAX components from `PrimeFaces <https://www.primefaces.org/showcase/>`__ like datatables and commandlinks.
 
 .. contents:: |toctitle|
   :local:
@@ -9,7 +9,7 @@ Patterns are what emerge when using the foundation elements together with basic
 Navbar
 ======
 
-The `Navbar component <http://getbootstrap.com/components/#navbar>`__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right.
+The `Navbar component <https://getbootstrap.com/components/#navbar>`__ from Bootstrap spans the top of the application and contains the logo/branding, aligned to the left, plus search form and links, aligned to the right.
 
 When logged in, the account name is a dropdown menu, linking the user to account-specific content and the log out link.
 
@@ -74,7 +74,7 @@ When logged in, the account name is a dropdown menu, linking the user to account
 Breadcrumbs
 ===========
 
-The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component <http://docs.oracle.com/javaee/6/javaserverfaces/2.0/docs/pdldocs/facelets/ui/repeat.html>`_ to iterate through the breadcrumbs.
+The breadcrumbs are displayed under the header, and provide a trail of links for users to navigate the hierarchy of containing objects, from file to dataset to Dataverse collection. It utilizes a JSF `repeat component <https://docs.oracle.com/javaee/6/javaserverfaces/2.0/docs/pdldocs/facelets/ui/repeat.html>`_ to iterate through the breadcrumbs.
 
 .. raw:: html
 
@@ -108,7 +108,7 @@ The breadcrumbs are displayed under the header, and provide a trail of links for
 Tables
 ======
 
-Most tables use the `DataTable components <https://www.primefaces.org/showcase/ui/data/datatable/basic.xhtml>`__ from PrimeFaces and are styled using the `Tables component <http://getbootstrap.com/css/#tables>`__ from Bootstrap.
+Most tables use the `DataTable components <https://www.primefaces.org/showcase/ui/data/datatable/basic.xhtml>`__ from PrimeFaces and are styled using the `Tables component <https://getbootstrap.com/css/#tables>`__ from Bootstrap.
 
 .. raw:: html
 
@@ -187,7 +187,7 @@ Most tables use the `DataTable components <https://www.primefaces.org/showcase/u
 Forms
 =====
 
-Forms fulfill various functions across the site, but we try to style them consistently. We use the ``.form-horizontal`` layout, which uses ``.form-group`` to create a grid of rows for the labels and inputs. The consistent style of forms is maintained using the `Forms component <http://getbootstrap.com/css/#forms>`__ from Bootstrap. Form elements like the `InputText component <https://www.primefaces.org/showcase/ui/input/inputText.xhtml>`__ from PrimeFaces are kept looking clean and consistent across each page.
+Forms fulfill various functions across the site, but we try to style them consistently. We use the ``.form-horizontal`` layout, which uses ``.form-group`` to create a grid of rows for the labels and inputs. The consistent style of forms is maintained using the `Forms component <https://getbootstrap.com/css/#forms>`__ from Bootstrap. Form elements like the `InputText component <https://www.primefaces.org/showcase/ui/input/inputText.xhtml>`__ from PrimeFaces are kept looking clean and consistent across each page.
 
 .. raw:: html
 
@@ -289,7 +289,7 @@ Here are additional form elements that are common across many pages, including r
 Buttons
 =======
 
-There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component <https://www.primefaces.org/showcase/ui/button/commandButton.xhtml>`__ and `CommandLink component <https://www.primefaces.org/showcase/ui/button/commandLink.xhtml>`__ from PrimeFaces, as well as the basic JSF `Link component <http://docs.oracle.com/javaee/6/javaserverfaces/2.0/docs/pdldocs/facelets/h/link.html>`__ and `OutputLink component <http://docs.oracle.com/javaee/6/javaserverfaces/2.0/docs/pdldocs/facelets/h/outputLink.html>`__. Those are styled using the `Buttons component <http://getbootstrap.com/css/#buttons>`__, `Button Groups component <http://getbootstrap.com/components/#btn-groups>`__ and `Buttons Dropdowns component <http://getbootstrap.com/components/#btn-dropdowns>`__ from Bootstrap.
+There are various types of buttons for various actions, so we have many components to use, including the `CommandButton component <https://www.primefaces.org/showcase/ui/button/commandButton.xhtml>`__ and `CommandLink component <https://www.primefaces.org/showcase/ui/button/commandLink.xhtml>`__ from PrimeFaces, as well as the basic JSF `Link component <https://docs.oracle.com/javaee/6/javaserverfaces/2.0/docs/pdldocs/facelets/h/link.html>`__ and `OutputLink component <https://docs.oracle.com/javaee/6/javaserverfaces/2.0/docs/pdldocs/facelets/h/outputLink.html>`__. Those are styled using the `Buttons component <https://getbootstrap.com/css/#buttons>`__, `Button Groups component <https://getbootstrap.com/components/#btn-groups>`__ and `Buttons Dropdowns component <https://getbootstrap.com/components/#btn-dropdowns>`__ from Bootstrap.
 
 Action Buttons
 --------------
@@ -668,7 +668,7 @@ Another variation of icon-only buttons uses the ``.btn-link`` style class from B
 Pagination
 ==========
 
-We use the `Pagination component <http://getbootstrap.com/components/#pagination>`__ from Bootstrap for paging through search results.
+We use the `Pagination component <https://getbootstrap.com/components/#pagination>`__ from Bootstrap for paging through search results.
 
 .. raw:: html
 
@@ -738,7 +738,7 @@ We use the `Pagination component <http://getbootstrap.com/components/#pagination
 Labels
 ======
 
-The `Labels component <http://getbootstrap.com/components/#labels>`__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial).
+The `Labels component <https://getbootstrap.com/components/#labels>`__ from Bootstrap is used for publication status (DRAFT, In Review, Unpublished, Deaccessioned), and Dataset version, as well as Tabular Data Tags (Survey, Time Series, Panel, Event, Genomics, Network, Geospatial).
 
 .. raw:: html
 
@@ -768,7 +768,7 @@ The `Labels component <http://getbootstrap.com/components/#labels>`__ from Boots
 Alerts
 ======
 
-For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component <http://getbootstrap.com/components/#alerts>`__ from Bootstrap.
+For our help/information, success, warning, and error message blocks we use a custom built UI component based on the `Alerts component <https://getbootstrap.com/components/#alerts>`__ from Bootstrap.
 
 .. raw:: html
 
@@ -859,9 +859,9 @@ Style classes can be added to ``p``, ``div``, ``span`` and other elements to add
 Images
 ======
 
-For images, we use the `GraphicImage  component <https://www.primefaces.org/showcase/ui/multimedia/graphicImage.xhtml>`__ from PrimeFaces, or the basic JSF `GraphicImage component <http://docs.oracle.com/javaee/6/javaserverfaces/2.1/docs/vdldocs/facelets/h/graphicImage.html>`__.
+For images, we use the `GraphicImage  component <https://www.primefaces.org/showcase/ui/multimedia/graphicImage.xhtml>`__ from PrimeFaces, or the basic JSF `GraphicImage component <https://docs.oracle.com/javaee/6/javaserverfaces/2.1/docs/vdldocs/facelets/h/graphicImage.html>`__.
 
-To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class <http://getbootstrap.com/css/#images>`__ from Bootstrap.
+To display images in a responsive way, they are styled with ``.img-responsive``, an `Images CSS class <https://getbootstrap.com/css/#images>`__ from Bootstrap.
 
 .. raw:: html
 
@@ -879,7 +879,7 @@ To display images in a responsive way, they are styled with ``.img-responsive``,
 Panels
 ======
 
-The most common of our containers, the `Panels component <http://getbootstrap.com/components/#panels>`__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the  `Collapse plugin <http://getbootstrap.com/javascript/#collapse>`__ from Bootstrap.
+The most common of our containers, the `Panels component <https://getbootstrap.com/components/#panels>`__ from Bootstrap is used to add a border and padding around sections of content like metadata blocks. Displayed with a header and/or footer, it can also be used with the  `Collapse plugin <https://getbootstrap.com/javascript/#collapse>`__ from Bootstrap.
 
 .. raw:: html
 
@@ -943,7 +943,7 @@ Tabs
 
 Tabs are used to provide content panes on a page that allow the user to view different sections of content without navigating to a different page.
 
-We use the `TabView component <https://www.primefaces.org/showcase/ui/panel/tabView.xhtml>`__ from PrimeFaces, which is styled using the `Tab component <http://getbootstrap.com/javascript/#tabs>`__ from Bootstrap.
+We use the `TabView component <https://www.primefaces.org/showcase/ui/panel/tabView.xhtml>`__ from PrimeFaces, which is styled using the `Tab component <https://getbootstrap.com/javascript/#tabs>`__ from Bootstrap.
 
 .. raw:: html
 
@@ -989,7 +989,7 @@ Modals are dialog prompts that act as popup overlays, but don't create a new bro
 
 Buttons usually provide the UI prompt. A user clicks the button, which then opens a `Dialog component <https://www.primefaces.org/showcase/ui/overlay/dialog/basic.xhtml>`__  or `Confirm Dialog component <https://www.primefaces.org/showcase/ui/overlay/confirmDialog.xhtml>`__  from PrimeFaces that displays the modal with the necessary information and actions to take.
 
-The modal is styled using the `Modal component <http://getbootstrap.com/javascript/#modals>`__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options.
+The modal is styled using the `Modal component <https://getbootstrap.com/javascript/#modals>`__ from Bootstrap, for a popup window that prompts a user for information, with overlay and a backdrop, then header, content, and buttons. We can use style classes from Bootstrap for large (``.bs-example-modal-lg``) and small (``.bs-example-modal-sm``) width options.
 
 .. raw:: html
 
diff --git a/doc/sphinx-guides/source/user/account.rst b/doc/sphinx-guides/source/user/account.rst
index 12cc54c7fde..81c416bafd1 100755
--- a/doc/sphinx-guides/source/user/account.rst
+++ b/doc/sphinx-guides/source/user/account.rst
@@ -109,7 +109,7 @@ If you are leaving your institution and need to convert your Dataverse installat
 ORCID Log In
 ~~~~~~~~~~~~~
 
-You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org <http://orcid.org>`_. 
+You can set up your Dataverse installation account to allow you to log in using your ORCID credentials. ORCID® is an independent non-profit effort to provide an open registry of unique researcher identifiers and open services to link research activities and organizations to these identifiers. Learn more at `orcid.org <https://orcid.org>`_. 
 
 Create a Dataverse installation account using ORCID
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -146,6 +146,8 @@ Microsoft Azure AD, GitHub, and Google Log In
 
 You can also convert your Dataverse installation account to use authentication provided by GitHub, Microsoft, or Google. These options may be found in the "Other options" section of the log in page, and function similarly to how ORCID is outlined above. If you would like to convert your account away from using one of these services for log in, then you can follow the same steps as listed above for converting away from the ORCID log in.
 
+.. _my-data:
+
 My Data
 -------
 
diff --git a/doc/sphinx-guides/source/user/appendix.rst b/doc/sphinx-guides/source/user/appendix.rst
index 7d60054ae17..878da96475a 100755
--- a/doc/sphinx-guides/source/user/appendix.rst
+++ b/doc/sphinx-guides/source/user/appendix.rst
@@ -22,13 +22,13 @@ Supported Metadata
 
 Detailed below are what metadata schemas we support for Citation and Domain Specific Metadata in the Dataverse Project:
 
-- `Citation Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=0>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/citation.tsv>`__): compliant with `DDI Lite <http://www.ddialliance.org/specification/ddi2.1/lite/index.html>`_, `DDI 2.5 Codebook <http://www.ddialliance.org/>`__, `DataCite 3.1 <http://schema.datacite.org/meta/kernel-3.1/doc/DataCite-MetadataKernel_v3.1.pdf>`__, and Dublin Core's `DCMI Metadata Terms <http://dublincore.org/documents/dcmi-terms/>`__ . Language field uses `ISO 639-1 <https://www.loc.gov/standards/iso639-2/php/English_list.php>`__ controlled vocabulary.
-- `Geospatial Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=4>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/geospatial.tsv>`__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. Country / Nation field uses `ISO 3166-1 <http://en.wikipedia.org/wiki/ISO_3166-1>`_ controlled vocabulary.
+- `Citation Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=0>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/citation.tsv>`__): compliant with `DDI Lite <https://www.ddialliance.org/specification/ddi2.1/lite/index.html>`_, `DDI 2.5 Codebook <https://www.ddialliance.org/>`__, `DataCite 3.1 <https://schema.datacite.org/meta/kernel-3.1/doc/DataCite-MetadataKernel_v3.1.pdf>`__, and Dublin Core's `DCMI Metadata Terms <https://dublincore.org/documents/dcmi-terms/>`__ . Language field uses `ISO 639-1 <https://www.loc.gov/standards/iso639-2/php/English_list.php>`__ controlled vocabulary.
+- `Geospatial Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=4>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/geospatial.tsv>`__): compliant with DDI Lite, DDI 2.5 Codebook, DataCite, and Dublin Core. Country / Nation field uses `ISO 3166-1 <https://en.wikipedia.org/wiki/ISO_3166-1>`_ controlled vocabulary.
 - `Social Science & Humanities Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=1>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/social_science.tsv>`__): compliant with DDI Lite, DDI 2.5 Codebook, and Dublin Core.
 - `Astronomy and Astrophysics Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=3>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/astrophysics.tsv>`__): These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) 
-  `VOResource Schema format <http://www.ivoa.net/documents/latest/RM.html>`__ and is based on
-  `Virtual Observatory (VO) Discovery and Provenance Metadata <http://perma.cc/H5ZJ-4KKY>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/astrophysics.tsv>`__).
-- `Life Sciences Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=2>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/biomedical.tsv>`__): based on `ISA-Tab Specification <https://isa-specs.readthedocs.io/en/latest/isamodel.html>`__, along with controlled vocabulary from subsets of the `OBI Ontology <http://bioportal.bioontology.org/ontologies/OBI>`__ and the `NCBI Taxonomy for Organisms <http://www.ncbi.nlm.nih.gov/Taxonomy/taxonomyhome.html/>`__.
+  `VOResource Schema format <https://www.ivoa.net/documents/latest/RM.html>`__ and are based on
+  `Virtual Observatory (VO) Discovery and Provenance Metadata <https://perma.cc/H5ZJ-4KKY>`__.
+- `Life Sciences Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodHFEWGpoa19ia3pldEFyVFR0aFVGa0E#gid=2>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/biomedical.tsv>`__): based on `ISA-Tab Specification <https://isa-specs.readthedocs.io/en/latest/isamodel.html>`__, along with controlled vocabulary from subsets of the `OBI Ontology <https://bioportal.bioontology.org/ontologies/OBI>`__ and the `NCBI Taxonomy for Organisms <https://www.ncbi.nlm.nih.gov/Taxonomy/taxonomyhome.html/>`__.
 - `Journal Metadata <https://docs.google.com/spreadsheets/d/13HP-jI_cwLDHBetn9UKTREPJ_F4iHdAvhjmlvmYdSSw/edit#gid=8>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/journals.tsv>`__): based on the `Journal Archiving and Interchange Tag Set, version 1.2 <https://jats.nlm.nih.gov/archiving/tag-library/1.2/chapter/how-to-read.html>`__.
 
 Experimental Metadata
@@ -37,7 +37,7 @@ Experimental Metadata
 Unlike supported metadata, experimental metadata is not enabled by default in a new Dataverse installation. Feedback via any `channel <https://dataverse.org/contact>`_ is welcome!
 
 - `CodeMeta Software Metadata <https://docs.google.com/spreadsheets/d/e/2PACX-1vTE-aSW0J7UQ0prYq8rP_P_AWVtqhyv46aJu9uPszpa9_UuOWRsyFjbWFDnCd7us7PSIpW7Qg2KwZ8v/pub>`__: based on the `CodeMeta Software Metadata Schema, version 2.0 <https://codemeta.github.io/terms/>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/codemeta.tsv>`__)
-- `Computational Workflow Metadata <https://docs.google.com/spreadsheets/d/13HP-jI_cwLDHBetn9UKTREPJ_F4iHdAvhjmlvmYdSSw/edit#gid=447508596>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/computationalworkflow.tsv>`__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 <https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE>`__ and `Codemeta <https://codemeta.github.io/terms/>`__.
+- `Computational Workflow Metadata <https://docs.google.com/spreadsheets/d/13HP-jI_cwLDHBetn9UKTREPJ_F4iHdAvhjmlvmYdSSw/edit#gid=447508596>`__ (`see .tsv version <https://github.com/IQSS/dataverse/blob/master/scripts/api/data/metadatablocks/computational_workflow.tsv>`__): adapted from `Bioschemas Computational Workflow Profile, version 1.0 <https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE>`__ and `Codemeta <https://codemeta.github.io/terms/>`__.
 
 Please note: these custom metadata schemas are not included in the Solr schema for indexing by default, you will need
 to add them as necessary for your custom metadata blocks. See "Update the Solr Schema" in :doc:`../admin/metadatacustomization`.
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index 31dd7f9cf78..708113f9a99 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -150,6 +150,11 @@ Additional command line arguments are available to make the DVUploader list what
 
 DVUploader is a community-developed tool, and its creation was primarily supported by the Texas Digital Library. Further information and support for DVUploader can be sought at `the project's GitHub repository <https://github.com/GlobalDataverseCommunityConsortium/dataverse-uploader>`_ . 
 
+Integrations Dashboard Uploader
+-------------------------------
+
+There is an experimental uploader described at :ref:`integrations-dashboard` that provides a graphical user interface (GUI) for uploading files from a local file system and various remote locations such as GitHub.
+
 .. _duplicate-files:
 
 Duplicate Files
@@ -195,6 +200,7 @@ Previewers are available for the following file types:
 
 - Text
 - PDF
+- Markdown
 - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`)
 - Code (R, etc.)
 - Images (PNG, GIF, JPG)
@@ -203,7 +209,9 @@ Previewers are available for the following file types:
 - Zip (preview and extract/download)
 - HTML
 - GeoJSON
-- NetCDF/HDF5 (NcML format)
+- GeoTIFF
+- Shapefile
+- NetCDF/HDF5
 - Hypothes.is
 
 Additional file types will be added to the `dataverse-previewers <https://github.com/gdcc/dataverse-previewers>`_ repo before they are listed above so please check there for the latest information or to request (or contribute!) an additional file previewer.
@@ -220,12 +228,13 @@ Additional download options available for tabular data (found in the same drop-d
 - As tab-delimited data (with the variable names in the first row); 
 - The original file uploaded by the user; 
 - Saved as R data (if the original file was not in R format); 
-- Variable Metadata (as a `DDI Codebook <http://www.ddialliance.org/Specification/DDI-Codebook/>`_ XML file);
-- Data File Citation (currently in either RIS, EndNote XML, or BibTeX format). 
-
+- Variable Metadata (as a `DDI Codebook <https://www.ddialliance.org/Specification/DDI-Codebook/>`_ XML file);
+- Data File Citation (currently in either RIS, EndNote XML, or BibTeX format).
 
 Differentially Private (DP) Metadata can also be accessed for restricted tabular files if the data depositor has created a DP Metadata Release. See :ref:`dp-release-create` for more information.
 
+.. _research-code:
+
 Research Code
 -------------
 
@@ -241,7 +250,7 @@ The following are general guidelines applicable to all programming languages.
 - Consider providing notes (in the README) on the expected code outputs or adding tests in the code, which would ensure that its functionality is intact.
 
 Capturing code dependencies will help other researchers recreate the necessary runtime environment. Without it, your code will not be able to run correctly (or at all). 
-One option is to use platforms such as `Whole Tale <https://wholetale.org>`_, `Jupyter Binder <https://mybinder.org>`_ or `Renku <https://renkulab.io>`_, which facilitate research reproducibility. Have a look at `Dataverse Integrations <https://guides.dataverse.org/en/5.4/admin/integrations.html>`_ for more information. 
+One option is to use platforms such as `Whole Tale <https://wholetale.org>`_, `Jupyter Binder <https://mybinder.org>`_ or `Renku <https://renkulab.io>`_, which facilitate research reproducibility. For more information, have a look at :doc:`/admin/integrations` in the Admin Guide, especially the sections on :ref:`wholetale`, :ref:`binder`, and :ref:`renku`.
 Another option is to use an automatic code dependency capture, which is often supported through the programming language. Here are a few examples:
 
 - If you are using the conda package manager, you can export your environment with the command ``conda env export > environment.yml``. For more information, see the `official documentation <https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#sharing-an-environment>`__.
@@ -322,10 +331,12 @@ You can also search for files within datasets that have been tagged as "Workflow
 
 |cw-image6|
 
+.. _fits:
+
 Astronomy (FITS)
 ----------------
 
-Metadata found in the header section of `Flexible Image Transport System (FITS) files <http://fits.gsfc.nasa.gov/fits_primer.html>`_ are automatically extracted by the Dataverse Software, aggregated and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata, is therefore searchable and browsable (facets) at the Dataset-level.
+Metadata found in the header section of `Flexible Image Transport System (FITS) files <https://fits.gsfc.nasa.gov/fits_primer.html>`_ are automatically extracted by the Dataverse Software, aggregated, and displayed in the Astronomy Domain-Specific Metadata of the Dataset that the file belongs to. This FITS file metadata is therefore searchable and browsable (facets) at the Dataset level.
 
 .. _geojson:
 
@@ -334,15 +345,67 @@ GeoJSON
 
 A map will be shown as a preview of GeoJSON files when the previewer has been enabled (see :ref:`file-previews`). See also a `video demo <https://www.youtube.com/watch?v=EACJJaV3O1c&t=588s>`_ of the GeoJSON previewer by its author, Kaitlin Newson.
 
+.. _geotiff:
+
+GeoTIFF
+-------
+
+A map is also displayed as a preview of GeoTIFF image files when the previewer has been enabled (see :ref:`file-previews`). Since GeoTIFFs do not have their own mimetype, it is advisable to enable this previewer only when GeoTIFFs (and not "normal" TIFFs) are in use. For performance reasons, this previewer has a file size limit of 15 MB and a row/column limit of 50,000, so that larger files are not loaded.
+
+.. _shapefile:
+
+Shapefile
+---------
+
+Another previewer can be enabled for shapefiles (see :ref:`file-previews`). This previewer only works with zipped shapefiles (see :doc:`/developers/geospatial`). A file size limit of 20 MB is set for this previewer, also for performance reasons.
+
 .. _netcdf-and-hdf5:
 
 NetCDF and HDF5
 ---------------
 
+H5Web Previewer
+~~~~~~~~~~~~~~~
+
+NetCDF and HDF5 files can be explored and visualized with H5Web_, which has been adapted into a file previewer tool (see :ref:`file-previews`) that can be enabled in your Dataverse installation.
+
+.. _H5Web: https://h5web.panosc.eu
+
+|h5web|
+
+NcML
+~~~~
+
 For NetCDF and HDF5 files, an attempt will be made to extract metadata in NcML_ (XML) format and save it as an auxiliary file. (See also :doc:`/developers/aux-file-support` in the Developer Guide.) A previewer for these NcML files is available (see :ref:`file-previews`).
 
+Please note that only modern, HDF5-based versions of these formats (NetCDF 4+ and HDF5 itself, rather than HDF4) will yield an NcML auxiliary file.
+
 .. _NcML: https://docs.unidata.ucar.edu/netcdf-java/current/userguide/ncml_overview.html
 
+Geospatial Bounding Box
+~~~~~~~~~~~~~~~~~~~~~~~
+
+An attempt will be made to extract a geospatial bounding box (west, south, east, north) from NetCDF and HDF5 files and then insert these values into the geospatial metadata block, if enabled.
+
+This is the mapping that is used:
+
+- geospatial_lon_min: West Longitude
+- geospatial_lon_max: East Longitude
+- geospatial_lat_max: North Latitude
+- geospatial_lat_min: South Latitude
+
+Please note the following rules regarding these fields:
+
+- West Longitude and East Longitude are expected to be in the range of -180 to 180. (When using :ref:`geospatial-search`, you should use this range for longitude.)
+- If West Longitude and East Longitude are both over 180 (outside the expected -180:180 range), 360 will be subtracted to shift the values from the 0:360 range to the expected -180:180 range.
+- If either West Longitude or East Longitude is less than zero but the other longitude is greater than 180 (which would imply an indeterminate domain, i.e. it is unclear whether the domain is -180:180 or 0:360), metadata will not be extracted.
+- If the bounding box was successfully populated, the subsequent removal of the NetCDF or HDF5 file from the dataset does not automatically remove the bounding box from the dataset metadata. You must remove the bounding box manually, if desired.
+- This feature is disabled if S3 direct upload is enabled (see :ref:`s3-direct-upload-features-disabled`) unless :ref:`dataverse.netcdf.geo-extract-s3-direct-upload` has been set to true.
+
+If the bounding box was successfully populated, :ref:`geospatial-search` should be able to find it.
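+
+For example, a search API call along the following lines (a sketch using the ``geo_point`` and ``geo_radius`` parameters described under :ref:`geospatial-search`; the point is latitude,longitude, the radius is in kilometers, and all values are placeholders) should match datasets whose bounding box covers that point::
+
+  curl "https://demo.dataverse.org/api/search?q=*&geo_point=42.3,-71.1&geo_radius=1.5"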
+
+.. _compressed-files:
+
 Compressed Files
 ----------------
 
@@ -432,7 +495,7 @@ Choosing a License
 ------------------
 
 Each Dataverse installation provides a set of license(s) data can be released under, and whether users can specify custom terms instead (see below). 
-One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication <http://creativecommons.org/publicdomain/zero/1.0>`_) serves as the default if you do not make an explicit choice.
+One of the available licenses (often the `Creative Commons CC0 Public Domain Dedication <https://creativecommons.org/publicdomain/zero/1.0>`_) serves as the default if you do not make an explicit choice.
 If you want to apply one of the other available licenses to your dataset, you can change it on the Terms tab of your Dataset page.
 
 License Selection and Professional Norms
@@ -720,7 +783,7 @@ The "Compute" button on dataset and file pages will allow you to compute on a si
 Cloud Storage Access
 --------------------
 
-If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifer can then be used to allow direct access to the dataset.
+If you need to access a dataset in a more flexible way than the Compute button provides, then you can use the Cloud Storage Access box on the dataset page to copy the dataset's container name. This unique identifier can then be used to allow direct access to the dataset.
 
 .. _deaccession:
 
@@ -763,6 +826,8 @@ If you deaccession the most recently published version of the dataset but not al
    :class: img-responsive
 .. |bagit-image1| image:: ./img/bagit-handler-errors.png
    :class: img-responsive
+.. |h5web| image:: ./img/h5web.png
+   :class: img-responsive
    
 .. _Make Data Count: https://makedatacount.org
 .. _Crossref: https://crossref.org
diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst
index ed90497da8c..0e0fbcc0883 100755
--- a/doc/sphinx-guides/source/user/dataverse-management.rst
+++ b/doc/sphinx-guides/source/user/dataverse-management.rst
@@ -25,6 +25,8 @@ Creating a Dataverse collection is easy but first you must be a registered user
     * **Category**: Select a category that best describes the type of Dataverse collection this will be. For example, if this is a Dataverse collection for an individual researcher's datasets, select *Researcher*. If this is a Dataverse collection for an institution, select *Organization or Institution*.
     * **Email**: This is the email address that will be used as the contact for this particular Dataverse collection. You can have more than one contact email address for your Dataverse collection.
     * **Description**: Provide a description of this Dataverse collection. This will display on the landing page of your Dataverse collection and in the search result list. The description field supports certain HTML tags, if you'd like to format your text (<a>, <b>, <blockquote>, <br>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul>).
+    * **Dataset Metadata Language**: (If enabled) Select which language should be used when entering dataset metadata, or leave that choice to dataset creators.
+    * **Guestbook Entry Option**: (If enabled) Select whether guestbooks are displayed when a user requests access to restricted file(s) or when they initiate a download.
 #. **Choose the sets of Metadata Fields for datasets in this Dataverse collection**:
     * By default the metadata elements will be from the host Dataverse collection that this new Dataverse collection is created in.
     * The Dataverse Software offers metadata standards for multiple domains. To learn more about the metadata standards in the Dataverse Software please check out the :doc:`/user/appendix`.
@@ -212,11 +214,11 @@ Dataset linking allows a Dataverse collection owner to "link" their Dataverse co
 
 For example, researchers working on a collaborative study across institutions can each link their own individual institutional Dataverse collections to the one collaborative dataset, making it easier for interested parties from each institution to find the study.
 
-In order to link a dataset, you will need your account to have the "Add Dataset" permission on the Dataverse collection that is doing the linking. If you created the Dataverse collection then you should have this permission already, but if not then you will need to ask the admin of that Dataverse collection to assign that permission to your account. You do not need any special permissions on the dataset being linked.
+In order to link a dataset, you will need your account to have the "Publish Dataset" permission on the Dataverse collection that is doing the linking. If you created the Dataverse collection then you should have this permission already, but if not then you will need to ask the admin of that Dataverse collection to assign that permission to your account. You do not need any special permissions on the dataset being linked.
 
 To link a dataset to your Dataverse collection, you must navigate to that dataset and click the white "Link" button in the upper-right corner of the dataset page. This will open up a window where you can type in the name of the Dataverse collection that you would like to link the dataset to. Select your Dataverse collection and click the save button. This will establish the link, and the dataset will now appear under your Dataverse collection.
 
-There is currently no way to remove established links in the UI. If you need to remove a link between a Dataverse collection and a dataset, please contact the support team for the Dataverse installation you are using.
+There is currently no way to remove established links in the UI. If you need to remove a link between a Dataverse collection and a dataset, please contact the support team for the Dataverse installation you are using (see the :ref:`unlink-a-dataset` section of the Admin Guide for more information).
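+
+For reference, a superuser can also remove such a link via the native API; a sketch of the kind of call documented in that Admin Guide section (server, API token, dataset ID, and collection alias are placeholders)::
+
+  curl -H "X-Dataverse-key: $API_TOKEN" -X DELETE "http://$SERVER/api/datasets/$linked-dataset-id/deleteLink/$linking-dataverse-alias"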
 
 .. _dataverse-linking:
 
diff --git a/doc/sphinx-guides/source/user/find-use-data.rst b/doc/sphinx-guides/source/user/find-use-data.rst
index 2e82a1482b4..bea23cbcd0e 100755
--- a/doc/sphinx-guides/source/user/find-use-data.rst
+++ b/doc/sphinx-guides/source/user/find-use-data.rst
@@ -71,7 +71,7 @@ View Files
 
 Files in a Dataverse installation each have their own landing page that can be reached through the search results or through the Files table on their parent dataset's page. The dataset page and file page offer much the same functionality in terms of viewing and editing files, with a few small exceptions. 
 
-- In installations that have enabled support for persistent identifers (PIDs) at the file level, the file page includes the file's DOI or handle, which can be found in the file citation and also under the Metadata tab.
+- In installations that have enabled support for persistent identifiers (PIDs) at the file level, the file page includes the file's DOI or handle, which can be found in the file citation and also under the Metadata tab.
 - Previewers for several common file types are available and can be added by installation administrators.
 - The file page's Versions tab gives you a version history that is more focused on the individual file rather than the dataset as a whole. 
 
diff --git a/doc/sphinx-guides/source/user/img/h5web.png b/doc/sphinx-guides/source/user/img/h5web.png
new file mode 100644
index 00000000000..176aa775114
Binary files /dev/null and b/doc/sphinx-guides/source/user/img/h5web.png differ
diff --git a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst
index f1d5611ede9..33ae9b555e6 100644
--- a/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst
+++ b/doc/sphinx-guides/source/user/tabulardataingest/ingestprocess.rst
@@ -27,7 +27,7 @@ separately, in a relational database, so that it can be accessed
 efficiently by the application. For the purposes of archival
 preservation it can be exported, in plain text XML files, using a
 standardized, open `DDI Codebook
-<http://www.ddialliance.org/Specification/DDI-Codebook/2.5/>`_
+<https://www.ddialliance.org/Specification/DDI-Codebook/2.5/>`_
 format. (more info below)
 
 
@@ -53,6 +53,6 @@ Tabular Metadata in the Dataverse Software
 
 The structure of the metadata defining tabular data variables used in
 the Dataverse Software was originally based on the `DDI Codebook
-<http://www.ddialliance.org/Specification/DDI-Codebook/2.5/>`_ format.
+<https://www.ddialliance.org/Specification/DDI-Codebook/2.5/>`_ format.
 
 You can see an example of DDI output under the :ref:`data-variable-metadata-access` section of the :doc:`/api/dataaccess` section of the API Guide.
diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst
index 4badeabef40..2cf7f46dc5e 100755
--- a/doc/sphinx-guides/source/versions.rst
+++ b/doc/sphinx-guides/source/versions.rst
@@ -6,8 +6,11 @@ Dataverse Software Documentation Versions
 
 This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases <https://github.com/IQSS/dataverse/releases>`__ page in our GitHub repo.
 
-- `develop Git branch <http://preview.guides.gdcc.io/en/develop/>`__
-- 5.13
+- pre-release `HTML (not final!) <http://preview.guides.gdcc.io/en/develop/>`__ and `PDF (experimental!) <http://preview.guides.gdcc.io/_/downloads/en/develop/pdf/>`__ built from the :doc:`develop </developers/version-control>` branch :doc:`(how to contribute!) </developers/documentation>`
+- 6.1
+- `6.0 </en/6.0/>`__
+- `5.14 </en/5.14/>`__
+- `5.13 </en/5.13/>`__
 - `5.12.1 </en/5.12.1/>`__
 - `5.12 </en/5.12/>`__
 - `5.11.1 </en/5.11.1/>`__
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
new file mode 100644
index 00000000000..5265a6b7c2d
--- /dev/null
+++ b/docker-compose-dev.yml
@@ -0,0 +1,220 @@
+version: "2.4"
+
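+# Development stack for Dataverse. A typical way to bring it up from the repository root
+# (with Docker Compose v2) is: docker compose -f docker-compose-dev.yml up
+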
+services:
+
+  dev_dataverse:
+    container_name: "dev_dataverse"
+    hostname: dataverse
+    image: ${APP_IMAGE}
+    restart: on-failure
+    user: payara
+    environment:
+      DATAVERSE_DB_HOST: postgres
+      DATAVERSE_DB_PASSWORD: secret
+      DATAVERSE_DB_USER: ${DATAVERSE_DB_USER}
+      ENABLE_JDWP: "1"
+      DATAVERSE_FEATURE_API_BEARER_AUTH: "1"
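+      # The OIDC settings below point at the dev_keycloak service defined later in this file,
+      # reachable inside the compose network via the keycloak.mydomain.com alias on port 8090.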
+      DATAVERSE_AUTH_OIDC_ENABLED: "1"
+      DATAVERSE_AUTH_OIDC_CLIENT_ID: test
+      DATAVERSE_AUTH_OIDC_CLIENT_SECRET: 94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8
+      DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL: http://keycloak.mydomain.com:8090/realms/test
+      DATAVERSE_JSF_REFRESH_PERIOD: "1"
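+      # JVM_ARGS below configures three storage drivers: a filesystem store (file1, the default),
+      # an S3 store backed by the dev_localstack service (localstack1), and an S3 store backed
+      # by the dev_minio service (minio1).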
+      JVM_ARGS: -Ddataverse.files.storage-driver-id=file1
+        -Ddataverse.files.file1.type=file
+        -Ddataverse.files.file1.label=Filesystem
+        -Ddataverse.files.file1.directory=${STORAGE_DIR}/store
+        -Ddataverse.files.localstack1.type=s3
+        -Ddataverse.files.localstack1.label=LocalStack
+        -Ddataverse.files.localstack1.custom-endpoint-url=http://localstack:4566
+        -Ddataverse.files.localstack1.custom-endpoint-region=us-east-2
+        -Ddataverse.files.localstack1.bucket-name=mybucket
+        -Ddataverse.files.localstack1.path-style-access=true
+        -Ddataverse.files.localstack1.upload-redirect=true
+        -Ddataverse.files.localstack1.download-redirect=true
+        -Ddataverse.files.localstack1.access-key=default
+        -Ddataverse.files.localstack1.secret-key=default
+        -Ddataverse.files.minio1.type=s3
+        -Ddataverse.files.minio1.label=MinIO
+        -Ddataverse.files.minio1.custom-endpoint-url=http://minio:9000
+        -Ddataverse.files.minio1.custom-endpoint-region=us-east-1
+        -Ddataverse.files.minio1.bucket-name=mybucket
+        -Ddataverse.files.minio1.path-style-access=true
+        -Ddataverse.files.minio1.upload-redirect=false
+        -Ddataverse.files.minio1.download-redirect=false
+        -Ddataverse.files.minio1.access-key=4cc355_k3y
+        -Ddataverse.files.minio1.secret-key=s3cr3t_4cc355_k3y
+    ports:
+      - "8080:8080" # HTTP (Dataverse Application)
+      - "4848:4848" # HTTP (Payara Admin Console)
+      - "9009:9009" # JDWP
+      - "8686:8686" # JMX
+    networks:
+      - dataverse
+    depends_on:
+      - dev_postgres
+      - dev_solr
+      - dev_dv_initializer
+    volumes:
+      - ./docker-dev-volumes/app/data:/dv
+      - ./docker-dev-volumes/app/secrets:/secrets
+      # Uncomment for changes to xhtml to be deployed immediately (if supported by your IDE or toolchain).
+      # Replace 6.0 with the current version.
+      # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse
+    tmpfs:
+      - /dumps:mode=770,size=2052M,uid=1000,gid=1000
+      - /tmp:mode=770,size=2052M,uid=1000,gid=1000
+    mem_limit: 2147483648 # 2 GiB
+    mem_reservation: 1024m
+    privileged: false
+
+  dev_bootstrap:
+    container_name: "dev_bootstrap"
+    image: gdcc/configbaker:unstable
+    restart: "no"
+    command:
+      - bootstrap.sh
+      - dev
+    networks:
+      - dataverse
+
+  dev_dv_initializer:
+    container_name: "dev_dv_initializer"
+    image: gdcc/configbaker:unstable
+    restart: "no"
+    command:
+      - sh
+      - -c
+      - "fix-fs-perms.sh dv"
+    volumes:
+      - ./docker-dev-volumes/app/data:/dv
+
+  dev_postgres:
+    container_name: "dev_postgres"
+    hostname: postgres
+    image: postgres:${POSTGRES_VERSION}
+    restart: on-failure
+    environment:
+      - POSTGRES_USER=${DATAVERSE_DB_USER}
+      - POSTGRES_PASSWORD=secret
+    ports:
+      - "5432:5432"
+    networks:
+      - dataverse
+    volumes:
+      - ./docker-dev-volumes/postgresql/data:/var/lib/postgresql/data
+
+  dev_solr_initializer:
+    container_name: "dev_solr_initializer"
+    image: gdcc/configbaker:unstable
+    restart: "no"
+    command:
+      - sh
+      - -c
+      - "fix-fs-perms.sh solr && cp -a /template/* /solr-template"
+    volumes:
+      - ./docker-dev-volumes/solr/data:/var/solr
+      - ./docker-dev-volumes/solr/conf:/solr-template
+
+  dev_solr:
+    container_name: "dev_solr"
+    hostname: "solr"
+    image: solr:${SOLR_VERSION}
+    depends_on:
+      - dev_solr_initializer
+    restart: on-failure
+    ports:
+      - "8983:8983"
+    networks:
+      - dataverse
+    command:
+      - "solr-precreate"
+      - "collection1"
+      - "/template"
+    volumes:
+      - ./docker-dev-volumes/solr/data:/var/solr
+      - ./docker-dev-volumes/solr/conf:/template
+
+  dev_smtp:
+    container_name: "dev_smtp"
+    hostname: "smtp"
+    image: maildev/maildev:2.0.5
+    restart: on-failure
+    ports:
+      - "25:25" # smtp server
+      - "1080:1080" # web ui
+    environment:
+      - MAILDEV_SMTP_PORT=25
+      - MAILDEV_MAIL_DIRECTORY=/mail
+    networks:
+      - dataverse
+    #volumes:
+    #  - ./docker-dev-volumes/smtp/data:/mail
+    tmpfs:
+      - /mail:mode=770,size=128M,uid=1000,gid=1000
+
+  dev_keycloak:
+    container_name: "dev_keycloak"
+    image: 'quay.io/keycloak/keycloak:21.0'
+    hostname: keycloak
+    environment:
+      - KEYCLOAK_ADMIN=kcadmin
+      - KEYCLOAK_ADMIN_PASSWORD=kcpassword
+      - KEYCLOAK_LOGLEVEL=DEBUG
+      - KC_HOSTNAME_STRICT=false
+    networks:
+      dataverse:
+        aliases:
+          - keycloak.mydomain.com #create a DNS alias within the network (add the same alias to your /etc/hosts to get a working OIDC flow)
+    command: start-dev --import-realm --http-port=8090  # change port to 8090 so that the same port is used inside and outside the network
+    ports:
+      - "8090:8090"
+    volumes:
+      - './conf/keycloak/test-realm.json:/opt/keycloak/data/import/test-realm.json'
+
+  dev_nginx:
+    container_name: dev_nginx
+    image: gdcc/dev_nginx:unstable
+    ports:
+      - "4849:4849"
+    restart: always
+    networks:
+      - dataverse
+
+  dev_localstack:
+    container_name: "dev_localstack"
+    hostname: "localstack"
+    image: localstack/localstack:2.3.2
+    restart: on-failure
+    ports:
+      - "127.0.0.1:4566:4566"
+    environment:
+      - DEBUG=${DEBUG-}
+      - DOCKER_HOST=unix:///var/run/docker.sock
+      - HOSTNAME_EXTERNAL=localstack
+    networks:
+      - dataverse
+    volumes:
+      - ./conf/localstack:/etc/localstack/init/ready.d
+    tmpfs:
+      - /localstack:mode=770,size=128M,uid=1000,gid=1000
+
+  dev_minio:
+    container_name: "dev_minio"
+    hostname: "minio"
+    image: minio/minio
+    restart: on-failure
+    ports:
+      - "9000:9000"
+      - "9001:9001"
+    networks:
+      - dataverse
+    volumes:
+      - minio_storage:/data
+    environment:
+      MINIO_ROOT_USER: 4cc355_k3y
+      MINIO_ROOT_PASSWORD: s3cr3t_4cc355_k3y
+    command: server /data
+
+networks:
+  dataverse:
+    driver: bridge
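+
+# A minimal usage sketch (assumptions: this file is picked up as your compose file and the
+# ${...} variables above are provided via an .env file next to it):
+#
+#   docker compose up -d                    # start the full dev stack
+#   docker compose logs -f dev_bootstrap    # follow the bootstrapping of the Dataverse instance
+#   docker compose down                     # stop and remove the containers
+#
+# Note: the named volume "minio_storage" used by dev_minio must be declared at the top level;
+# if it is not declared elsewhere in this file, add:
+#
+# volumes:
+#   minio_storage: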
diff --git a/downloads/.gitignore b/downloads/.gitignore
deleted file mode 100644
index 1b51bf4def7..00000000000
--- a/downloads/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-payara-5.201.zip
-solr-7.3.0.tgz
-weld-osgi-bundle-2.2.10.Final-glassfish4.jar
-schemaSpy_5.0.0.jar
diff --git a/downloads/download.sh b/downloads/download.sh
deleted file mode 100755
index 7b9de0397cb..00000000000
--- a/downloads/download.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-curl -L -O https://s3-eu-west-1.amazonaws.com/payara.fish/Payara+Downloads/5.2022.3/payara-5.2022.3.zip
-curl -L -O https://archive.apache.org/dist/lucene/solr/8.11.1/solr-8.11.1.tgz
-curl -L -O https://search.maven.org/remotecontent?filepath=org/jboss/weld/weld-osgi-bundle/2.2.10.Final/weld-osgi-bundle-2.2.10.Final-glassfish4.jar
-curl -s -L http://sourceforge.net/projects/schemaspy/files/schemaspy/SchemaSpy%205.0.0/schemaSpy_5.0.0.jar/download > schemaSpy_5.0.0.jar
diff --git a/downloads/stata-13-test-files/Stata14TestFile.dta b/downloads/stata-13-test-files/Stata14TestFile.dta
deleted file mode 100644
index 6f1c31dc798..00000000000
Binary files a/downloads/stata-13-test-files/Stata14TestFile.dta and /dev/null differ
diff --git a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar b/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar
deleted file mode 100644
index ee499ae4b76..00000000000
Binary files a/local_lib/com/apicatalog/titanium-json-ld/1.3.0-SNAPSHOT/titanium-json-ld-1.3.0-SNAPSHOT.jar and /dev/null differ
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar
deleted file mode 100644
index dc41f94046f..00000000000
Binary files a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar and /dev/null differ
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5
deleted file mode 100644
index 7018ea4e822..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-eeef5c0dc201d1105b9529a51fa8cdab
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1
deleted file mode 100644
index 97f192f3732..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1fa716d318920fd59fc63f77965d113decf97355
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom
deleted file mode 100644
index ea2e4c03f9f..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.harvard.iq.dvn</groupId>
-  <artifactId>unf5</artifactId>
-  <version>5.0</version>
-</project>
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5
deleted file mode 100644
index a88cf2a1c02..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-2df5dac09375e1e7fcd66c705d9ca2ef
diff --git a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1 b/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1
deleted file mode 100644
index 967b977b79e..00000000000
--- a/local_lib/edu/harvard/iq/dvn/unf5/5.0/unf5-5.0.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-431cd55e2e9379677d14e402dd3c474bb7be4ac9
diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar
deleted file mode 100644
index 1f8e1c3eb12..00000000000
Binary files a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.jar and /dev/null differ
diff --git a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom b/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom
deleted file mode 100644
index e3c09349172..00000000000
--- a/local_lib/net/handle/handle/8.1.1/handle-8.1.1.pom
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>net.handle</groupId>
-  <artifactId>handle</artifactId>
-  <version>8.1.1</version>
-  <description>POM was created from install:install-file</description>
-</project>
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar
deleted file mode 100644
index b3bddd62c24..00000000000
Binary files a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar and /dev/null differ
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5
deleted file mode 100644
index 576062f55a1..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-b0abb2fee242c479f305f47352600bbf
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1
deleted file mode 100644
index e81e8450ef0..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9643e138cb5ed2684838e4b4faa118adfb2ecb4b
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom
deleted file mode 100644
index b57cd67278b..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>nom.tam.fits</groupId>
-  <artifactId>fits</artifactId>
-  <version>2012-10-25-generated</version>
-</project>
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5
deleted file mode 100644
index 777b4df3325..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-23ca47c46df791f220a87cfef3b2190c
diff --git a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1 b/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1
deleted file mode 100644
index b5f41fd1a69..00000000000
--- a/local_lib/nom/tam/fits/fits/2012-10-25-generated/fits-2012-10-25-generated.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c1ec9dfbbc72dc4623d309d772b804e47284ee27
diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml b/local_lib/nom/tam/fits/fits/maven-metadata.xml
deleted file mode 100644
index 4fc3254df3f..00000000000
--- a/local_lib/nom/tam/fits/fits/maven-metadata.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<metadata>
-  <groupId>nom.tam.fits</groupId>
-  <artifactId>fits</artifactId>
-  <versioning>
-    <release>2012-10-25-generated</release>
-    <versions>
-      <version>2012-10-25-generated</version>
-    </versions>
-    <lastUpdated>20130925190525</lastUpdated>
-  </versioning>
-</metadata>
diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5
deleted file mode 100644
index b6d7e4a726f..00000000000
--- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.md5
+++ /dev/null
@@ -1 +0,0 @@
-545c78160393b4c80e40377f2a7cf406
\ No newline at end of file
diff --git a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1 b/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1
deleted file mode 100644
index 188cf8ae044..00000000000
--- a/local_lib/nom/tam/fits/fits/maven-metadata.xml.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9cf56b8ef3f2bacdc669c2c7cdcd7cd50ed38dbb
\ No newline at end of file
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar
deleted file mode 100644
index d2738e2dadd..00000000000
Binary files a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar and /dev/null differ
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5 b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5
deleted file mode 100644
index 04ca3e73ce8..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-bd9b84a9ad737a81a2699ab81541a901
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1 b/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1
deleted file mode 100644
index a48cef32570..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4cad279c362e4c5c17a2058dc2c8f2fc97c76bf8
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom
deleted file mode 100644
index 06f1508723f..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
-    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.dataverse</groupId>
-  <artifactId>unf</artifactId>
-  <version>6.0</version>
-</project>
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5 b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5
deleted file mode 100644
index 138bc9c95f6..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-230c5b1f5ae71bb2fe80ef9e7209f681
diff --git a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1 b/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1
deleted file mode 100644
index 689e8045418..00000000000
--- a/local_lib/org/dataverse/unf/6.0/unf-6.0.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-286b819f2fc7432a94b5940c6171be1589f66a37
diff --git a/mdc-logs/raw-mdc-2019-01-07.log b/mdc-logs/raw-mdc-2019-01-07.log
deleted file mode 100644
index d7a6386160e..00000000000
--- a/mdc-logs/raw-mdc-2019-01-07.log
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fields: event_time	client_ip	session_cookie_id	user_cookie_id	user_id	request_url	identifier	filename	size	user-agent	title	publisher	publisher_id	authors	publication_date	version	other_id	target_url	publication_year
-2019-01-07T15:14:51-0500	0:0:0:0:0:0:0:1	9f4209d3c177d3cb77f4d06cf3ba	-	:guest	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	doi:10.5072/FK2/XTT5BV	-	-	Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36	Dataset One	-	1	Smith, Robert| Kew, Susie	2019-01-07T18:20:54Z	1	-	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	2019
-2019-01-07T15:15:15-0500	0:0:0:0:0:0:0:1	9f4209d3c177d3cb77f4d06cf3ba	-	:guest	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	doi:10.5072/FK2/XTT5BV	-	-	Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36	Dataset One	-	1	Smith, Robert| Kew, Susie	2019-01-07T18:20:54Z	1	-	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	2019
-2019-01-07T15:16:04-0500	0:0:0:0:0:0:0:1	9f4209d3c177d3cb77f4d06cf3ba	-	:guest	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	doi:10.5072/FK2/XTT5BV	-	-	Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36	Dataset One	-	1	Smith, Robert| Kew, Susie	2019-01-07T18:20:54Z	1	-	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	2019
-2019-01-07T15:16:14-0500	0:0:0:0:0:0:0:1	9f4209d3c177d3cb77f4d06cf3ba	-	:guest	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	doi:10.5072/FK2/XTT5BV	168298bae7c-2c5bbc1a9c8c	1	Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36	Dataset One	-	1	Smith, Robert| Kew, Susie	2019-01-07T18:20:54Z	1	-	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	2019
-2019-01-07T15:16:19-0500	0:0:0:0:0:0:0:1	9f4209d3c177d3cb77f4d06cf3ba	-	:guest	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	doi:10.5072/FK2/XTT5BV	168298bb8ce-337d8df49763	4026	Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36	Dataset One	-	1	Smith, Robert| Kew, Susie	2019-01-07T18:20:54Z	1	-	http://localhost:8080/dataset.xhtml?persistentId=doi:10.5072/FK2/XTT5BV	2019
diff --git a/modules/container-base/README.md b/modules/container-base/README.md
index 15011d5c6f4..dc4d185bbb5 100644
--- a/modules/container-base/README.md
+++ b/modules/container-base/README.md
@@ -7,7 +7,9 @@ Adding basic functionality like executing scripts at container boot, monitoring,
 at this layer. Application images building from this very base focus on adding deployable Dataverse code and 
 actual scripts.
 
-*Note:* Currently, there is no application image. Please watch https://github.com/IQSS/dataverse/issues/8934
+There is a community-based [application image](https://hub.docker.com/r/gdcc/dataverse)
+([docs](https://guides.dataverse.org/en/latest/container/app-image.html)), but you may create your own or even reuse
+this image for purposes other than the Dataverse application.
 
 ## Quick Reference
 
@@ -23,8 +25,8 @@ provides in-depth information about content, building, tuning and so on for this
 **Where to get help and ask questions:**
 
 IQSS will not offer support on how to deploy or run it. Please reach out to the community for help on using it.
-You can join the Community Chat on Matrix at https://chat.dataverse.org or the Community Slack at
-https://dataversecommunity.slack.com to ask for help and guidance.
+You can join the Community Chat on Matrix at https://chat.dataverse.org or the Dataverse Community Google Group at
+https://groups.google.com/g/dataverse-community to ask for help and guidance.
 
 ## Supported Image Tags
 
@@ -34,7 +36,7 @@ happens there (again, by the community). Community-supported image tags are base
 
 - The `unstable` tag corresponds to the `develop` branch, where pull requests are merged.
   ([`Dockerfile`](https://github.com/IQSS/dataverse/tree/develop/modules/container-base/src/main/docker/Dockerfile))
-- The `stable` tag corresponds to the `master` branch, where releases are cut from.
+- The `alpha` tag corresponds to the `master` branch, where releases are cut from.
   ([`Dockerfile`](https://github.com/IQSS/dataverse/tree/master/modules/container-base/src/main/docker/Dockerfile))
 
 Within the main repository, you may find the base image files at `<git root>/modules/container-base`.
diff --git a/modules/container-base/pom.xml b/modules/container-base/pom.xml
index bbee6ad67d5..fc672696df4 100644
--- a/modules/container-base/pom.xml
+++ b/modules/container-base/pom.xml
@@ -44,6 +44,7 @@
                 <java.image>eclipse-temurin:${target.java.version}-jre</java.image>
                 <base.image.uid>1000</base.image.uid>
                 <base.image.gid>1000</base.image.gid>
+                <docker.platforms>linux/amd64,linux/arm64</docker.platforms>
             </properties>
             
             <build>
@@ -94,8 +95,8 @@
                                     <build>
                                         <buildx>
                                             <platforms>
-                                                <platform>linux/arm64</platform>
-                                                <platform>linux/amd64</platform>
+                                                <!-- Will be empty by default, deactivating buildx -->
+                                                <platform>${docker.platforms}</platform>
                                             </platforms>
                                             <dockerStateDir>${project.build.directory}/buildx-state</dockerStateDir>
                                         </buildx>
diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile
index 07968e92359..97aa4cd2792 100644
--- a/modules/container-base/src/main/docker/Dockerfile
+++ b/modules/container-base/src/main/docker/Dockerfile
@@ -164,6 +164,8 @@ RUN <<EOF
     ${ASADMIN} create-jvm-options '-XX\:MetaspaceSize=${ENV=MEM_METASPACE_SIZE}'
     ${ASADMIN} create-jvm-options '-XX\:MaxMetaspaceSize=${ENV=MEM_MAX_METASPACE_SIZE}'
     ${ASADMIN} create-jvm-options '-XX\:+IgnoreUnrecognizedVMOptions'
+    # Workaround for FISH-7722: Failed to deploy war with @Stateless https://github.com/payara/Payara/issues/6337
+    ${ASADMIN} create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED
     # Disable autodeploy and hot reload
     ${ASADMIN} set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled="false"
     ${ASADMIN} set configs.config.server-config.admin-service.das-config.autodeploy-enabled="false"
@@ -190,6 +192,9 @@ RUN <<EOF
     ### DATAVERSE APPLICATION SPECIFICS
     # Configure the MicroProfile directory config source to point to /secrets
     ${ASADMIN} set-config-dir --directory="${SECRETS_DIR}"
+    # Password alias store ordinal = 105, default ordinal = 100 - sort ours between those to enable overriding from
+    # all of the other sources except the alias config source and microprofile-config.properties
+    ${ASADMIN} set-config-ordinal --ordinal=104 --source=secrets
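+    # Resulting precedence (higher ordinal wins; 400/300/100 are the MicroProfile Config defaults):
+    #   system properties (400) > env vars (300) > password aliases (105) > secrets directory (104) > microprofile-config.properties (100)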
     # Make request timeouts configurable via MPCONFIG (default to 900 secs = 15 min)
     ${ASADMIN} set 'server-config.network-config.protocols.protocol.http-listener-1.http.request-timeout-seconds=${MPCONFIG=dataverse.http.timeout:900}'
     # TODO: what of the below 3 items can be deleted for container usage?
@@ -203,7 +208,7 @@ RUN <<EOF
     # Stop domain
     ${ASADMIN} stop-domain "${DOMAIN_NAME}"
     # Disable JSP servlet dynamic reloads
-    sed -i 's#<servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>#<servlet-class>org.apache.jasper.servlet.JspServlet</servlet-class>\n    <init-param>\n      <param-name>development</param-name>\n      <param-value>false</param-value>\n    </init-param>\n    <init-param>\n      <param-name>genStrAsCharArray</param-name>\n      <param-value>true</param-value>\n    </init-param>#' "${DOMAIN_DIR}/config/default-web.xml"
+    sed -i 's#<servlet-class>org.glassfish.wasp.servlet.JspServlet</servlet-class>#<servlet-class>org.glassfish.wasp.servlet.JspServlet</servlet-class>\n    <init-param>\n      <param-name>development</param-name>\n      <param-value>false</param-value>\n    </init-param>\n    <init-param>\n      <param-name>genStrAsCharArray</param-name>\n      <param-value>true</param-value>\n    </init-param>#' "${DOMAIN_DIR}/config/default-web.xml"
     # Cleanup old CA certificates to avoid unnecessary log clutter during startup
     ${SCRIPT_DIR}/removeExpiredCaCerts.sh
     # Delete generated files
diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh
index e2d717af666..8729f78e466 100644
--- a/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh
+++ b/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh
@@ -61,5 +61,6 @@ find "$DEPLOY_DIR" -mindepth 1 -maxdepth 1 -name "*.rar" -print0 \
   | while IFS= read -r -d '' file; do deploy "$file"; done
 
 # Then every other WAR, EAR, JAR or directory
-find "$DEPLOY_DIR" -mindepth 1 -maxdepth 1 ! -name "*.rar" -a -name "*.war" -o -name "*.ear" -o -name "*.jar" -o -type d -print0 \
-  | while IFS= read -r -d '' file; do deploy "$file"; done
\ No newline at end of file
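+# (-a binds more tightly than -o, so the group matches non-RAR WARs, plus any EAR, JAR or directory)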
+find "$DEPLOY_DIR" -mindepth 1 -maxdepth 1 \
+  \( ! -name "*.rar" -a -name "*.war" -o -name "*.ear" -o -name "*.jar" -o -type d \) \
+  -print0 | while IFS= read -r -d '' file; do deploy "$file"; done
diff --git a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
index 205a9eda5d7..c019c09130e 100644
--- a/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
+++ b/modules/container-base/src/main/docker/scripts/removeExpiredCaCerts.sh
@@ -8,6 +8,14 @@
 set -euo pipefail
 
 KEYSTORE="${DOMAIN_DIR}/config/cacerts.jks"
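+# Fall back to cacerts.p12, which newer Payara versions ship instead of cacerts.jks: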
+if [ ! -r "${KEYSTORE}" ]; then
+  KEYSTORE="${DOMAIN_DIR}/config/cacerts.p12"
+  if [ ! -r "${KEYSTORE}" ]; then
+    echo "Could not find CA certs keystore"
+    exit 1
+  fi
+fi
+
 keytool -list -v -keystore "${KEYSTORE}" -storepass changeit 2>/dev/null | \
     grep -i 'alias\|until' > aliases.txt
 
diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile
new file mode 100644
index 00000000000..9b98334d72b
--- /dev/null
+++ b/modules/container-configbaker/Dockerfile
@@ -0,0 +1,58 @@
+# Copyright 2023 Forschungszentrum Jülich GmbH
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# This build arg must be given or build will fail
+ARG SOLR_VERSION
+# We simply have this intermediate stage here, with no build activity of its own, so we can copy the default configset over
+FROM solr:${SOLR_VERSION} AS solr
+
+# Let's build us a baker
+FROM alpine:3
+
+ENV SCRIPT_DIR="/scripts" \
+    SECRETS_DIR="/secrets" \
+    SOLR_TEMPLATE="/template"
+ENV PATH="${PATH}:${SCRIPT_DIR}" \
+    BOOTSTRAP_DIR="${SCRIPT_DIR}/bootstrap"
+
+ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed"
+
+RUN true && \
+  # Install necessary software and tools
+  apk add --no-cache ${APK_PACKAGES} && \
+  # Make our working directories
+  mkdir -p ${SCRIPT_DIR} ${SECRETS_DIR} ${SOLR_TEMPLATE}
+
+# Get in the scripts 
+COPY maven/scripts maven/solr/update-fields.sh ${SCRIPT_DIR}/
+# Copy the data from scripts/api that provides the common base setup you'd get from the installer.
+# ".dockerignore" will take care of including only the bare necessities
+COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/
+# Make the scripts executable
+RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh
+
+# Copy the Solr config bits
+COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/
+COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/
+RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml
+
+
+# Set the entrypoint to tini (as a process supervisor)
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# By default run a script that will print a help message and terminate
+CMD ["help.sh"]
+
+LABEL org.opencontainers.image.created="@git.build.time@" \
+      org.opencontainers.image.authors="Research Data Management at FZJ <forschungsdaten@fz-juelich.de>" \
+      org.opencontainers.image.url="https://guides.dataverse.org/en/latest/container/" \
+      org.opencontainers.image.documentation="https://guides.dataverse.org/en/latest/container/" \
+      org.opencontainers.image.source="https://github.com/IQSS/dataverse/tree/develop/modules/container-configbaker" \
+      org.opencontainers.image.version="@project.version@" \
+      org.opencontainers.image.revision="@git.commit.id.abbrev@" \
+      org.opencontainers.image.vendor="Global Dataverse Community Consortium" \
+      org.opencontainers.image.licenses="Apache-2.0" \
+      org.opencontainers.image.title="Dataverse Config Baker Image" \
+      org.opencontainers.image.description="This container image configures Dataverse and provides other tooling"
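+
+# Note: the maven/* paths in the COPY instructions above are produced by the Maven build
+# (see modules/container-configbaker/assembly.xml), so this image is normally built through the
+# project's Maven container tooling rather than a plain `docker build` of this directory.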
diff --git a/modules/container-configbaker/README.md b/modules/container-configbaker/README.md
new file mode 100644
index 00000000000..17b6f985798
--- /dev/null
+++ b/modules/container-configbaker/README.md
@@ -0,0 +1,46 @@
+# Config Baker
+
+The Config Baker container may be used to execute all sorts of tasks around setting up, preparing and finalizing
+an instance of the Dataverse software. Its focus is bootstrapping non-initialized installations.
+
+You may use this image as is, base your own derivative image on it or use bind mounts to change behavior.
+
+## Quick Reference
+
+**Maintained by:** 
+
+This image is created, maintained and supported by the Dataverse community on a best-effort basis.
+
+**Where to find documentation:**
+
+The [Dataverse Container Guide - Config Baker Image](https://guides.dataverse.org/en/latest/container/configbaker-image.html)
+provides information about this image. 
+
+**Where to get help and ask questions:**
+
+IQSS will not offer support on how to deploy or run it. Please reach out to the community for help on using it.
+You can join the Community Chat on Matrix at https://chat.dataverse.org or the Dataverse Community Google Group at
+https://groups.google.com/g/dataverse-community to ask for help and guidance.
+
+## Supported Image Tags
+
+This image is sourced within the main upstream code [repository of the Dataverse software](https://github.com/IQSS/dataverse).
+Development and maintenance of the [image's code](https://github.com/IQSS/dataverse/tree/develop/modules/container-configbaker)
+happens there (again, by the community). Community-supported image tags are based on the two most important branches:
+
+- The `unstable` tag corresponds to the `develop` branch, where pull requests are merged.
+  ([`Dockerfile`](https://github.com/IQSS/dataverse/tree/develop/modules/container-configbaker/src/main/docker/Dockerfile))
+- The `alpha` tag corresponds to the `master` branch, where releases are cut from.
+  ([`Dockerfile`](https://github.com/IQSS/dataverse/tree/master/modules/container-configbaker/src/main/docker/Dockerfile))
+
+## License
+
+Image content created by the community is licensed under [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0), 
+like the [main Dataverse project](https://github.com/IQSS/dataverse/blob/develop/LICENSE.md).
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and limitations under the License.
+
+As for any pre-built image usage, it is the image user's responsibility to ensure that any use of this image complies
+with any relevant licenses for all software contained within.
diff --git a/modules/container-configbaker/assembly.xml b/modules/container-configbaker/assembly.xml
new file mode 100644
index 00000000000..3285eef510a
--- /dev/null
+++ b/modules/container-configbaker/assembly.xml
@@ -0,0 +1,46 @@
+<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">
+    <fileSets>
+        <!-- Get the scripts -->
+        <fileSet>
+            <directory>modules/container-configbaker/scripts</directory>
+            <outputDirectory>scripts</outputDirectory>
+        </fileSet>
+        <!-- Get our custom Solr files -->
+        <fileSet>
+            <directory>conf/solr/9.3.0</directory>
+            <outputDirectory>solr</outputDirectory>
+        </fileSet>
+        <!-- Get the setup scripts from the installer (selected choice only) -->
+        <fileSet>
+            <directory>scripts/api</directory>
+            <outputDirectory>setup</outputDirectory>
+            <includes>
+                <include>setup-all.sh</include>
+                <include>setup-builtin-roles.sh</include>
+                <include>setup-datasetfields.sh</include>
+                <include>setup-identity-providers.sh</include>
+                
+                <include>data/licenses/*.json</include>
+                <include>data/authentication-providers/builtin.json</include>
+                <include>data/metadatablocks/*.tsv</include>
+                
+                <include>data/dv-root.json</include>
+                
+                <include>data/role-admin.json</include>
+                <include>data/role-curator.json</include>
+                <include>data/role-dsContributor.json</include>
+                <include>data/role-dvContributor.json</include>
+                <include>data/role-editor.json</include>
+                <include>data/role-filedownloader.json</include>
+                <include>data/role-fullContributor.json</include>
+                <include>data/role-member.json</include>
+                
+                <include>data/user-admin.json</include>
+            </includes>
+            <excludes>
+                <exclude>data/metadatablocks/custom*</exclude>
+            </excludes>
+        </fileSet>
+    </fileSets>
+</assembly>
diff --git a/modules/container-configbaker/scripts/bootstrap.sh b/modules/container-configbaker/scripts/bootstrap.sh
new file mode 100644
index 00000000000..a00916880db
--- /dev/null
+++ b/modules/container-configbaker/scripts/bootstrap.sh
@@ -0,0 +1,77 @@
+#!/bin/bash
+
+# [INFO]: Execute bootstrapping configuration of a freshly baked instance
+
+set -euo pipefail
+
+function usage() {
+  echo "Usage: $(basename "$0") [-h] [-u instanceUrl] [-t timeout] [-e targetEnvFile] [<persona>]"
+  echo ""
+  echo "Execute initial configuration (bootstrapping) of an empty Dataverse instance."
+  echo -n "Known personas: "
+  find "${BOOTSTRAP_DIR}" -mindepth 1 -maxdepth 1 -type d -exec basename {} \; | paste -sd ' '
+  echo ""
+  echo "Parameters:"
+  echo "  instanceUrl - Location on container network where to reach your instance. Default: 'http://dataverse:8080'"
+  echo "      timeout - Provide how long to wait for the instance to become available (using wait4x). Default: '2m'"
+  echo "targetEnvFile - Path to a file where the bootstrap process can expose information as env vars (e.g. dataverseAdmin's API token)"
+  echo "      persona - Configure persona to execute. Calls ${BOOTSTRAP_DIR}/<persona>/init.sh. Default: 'base'"
+  echo ""
+  echo "Note: This script will wait for the Dataverse instance to be available before executing the bootstrapping."
+  echo "      It also checks if already bootstrapped before (availability of metadata blocks) and skip if true."
+  echo ""
+  exit 1
+}
+
+# Set some defaults as documented
+DATAVERSE_URL=${DATAVERSE_URL:-"http://dataverse:8080"}
+TIMEOUT=${TIMEOUT:-"3m"}
+TARGET_ENV_FILE=${TARGET_ENV_FILE:-""}
+
+while getopts "u:t:e:h" OPTION
+do
+  case "$OPTION" in
+    u) DATAVERSE_URL="$OPTARG" ;;
+    t) TIMEOUT="$OPTARG" ;;
+    e) TARGET_ENV_FILE="$OPTARG" ;;
+    h) usage;;
+    \?) usage;;
+  esac
+done
+shift $((OPTIND-1))
+
+# Assign persona if present or go default
+PERSONA=${1:-"base"}
+
+# Export the URL to be reused in the actual setup scripts
+export DATAVERSE_URL
+
+# Wait for the instance to become available
+echo "Waiting for ${DATAVERSE_URL} to become ready in max ${TIMEOUT}."
+wait4x http "${DATAVERSE_URL}/api/info/version" -i 8s -t "$TIMEOUT" --expect-status-code 200 --expect-body-json data.version
+
+# Avoid bootstrapping again by checking if a metadata block has been loaded
+BLOCK_COUNT=$(curl -sSf "${DATAVERSE_URL}/api/metadatablocks" | jq ".data | length")
+if [[ $BLOCK_COUNT -gt 0 ]]; then
+  echo "Your instance has been bootstrapped already, skipping."
+  exit 0
+fi
+
+# Provide a space to store environment variables output to
+ENV_OUT=$(mktemp)
+export ENV_OUT
+
+# Now execute the bootstrapping script
+echo "Now executing bootstrapping script at ${BOOTSTRAP_DIR}/${PERSONA}/init.sh."
+# shellcheck disable=SC1090
+source "${BOOTSTRAP_DIR}/${PERSONA}/init.sh"
+
+# If the env file option was given, check if the file is writeable and copy content from the temporary file
+if [[ -n "${TARGET_ENV_FILE}" ]]; then
+  if [[ -f "${TARGET_ENV_FILE}" && -w "${TARGET_ENV_FILE}" ]]; then
+    cat "${ENV_OUT}" > "${TARGET_ENV_FILE}"
+  else
+    echo "File ${TARGET_ENV_FILE} not found, is a directory or not writeable"
+    exit 2
+  fi
+fi
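+
+# Invocation sketches (this script ships in the configbaker image; the dev compose stack runs it
+# via its dev_bootstrap service as "bootstrap.sh dev"):
+#   bootstrap.sh                                       # bootstrap the 'base' persona against http://dataverse:8080
+#   bootstrap.sh -u http://dataverse:8080 -t 3m dev    # explicit URL and timeout, 'dev' persona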
diff --git a/modules/container-configbaker/scripts/bootstrap/base/init.sh b/modules/container-configbaker/scripts/bootstrap/base/init.sh
new file mode 100644
index 00000000000..81c2b59f347
--- /dev/null
+++ b/modules/container-configbaker/scripts/bootstrap/base/init.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# Set some defaults as documented
+DATAVERSE_URL=${DATAVERSE_URL:-"http://dataverse:8080"}
+export DATAVERSE_URL
+
+./setup-all.sh
diff --git a/modules/container-configbaker/scripts/bootstrap/dev/init.sh b/modules/container-configbaker/scripts/bootstrap/dev/init.sh
new file mode 100644
index 00000000000..efdaee3d0c3
--- /dev/null
+++ b/modules/container-configbaker/scripts/bootstrap/dev/init.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# Set some defaults as documented
+DATAVERSE_URL=${DATAVERSE_URL:-"http://dataverse:8080"}
+export DATAVERSE_URL
+
+echo "Running base setup-all.sh (INSECURE MODE)..."
+"${BOOTSTRAP_DIR}"/base/setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out
+
+echo "Setting system mail address..."
+curl -X PUT -d "dataverse@localhost" "${DATAVERSE_URL}/api/admin/settings/:SystemEmail"
+
+echo "Setting DOI provider to \"FAKE\"..."
+curl "${DATAVERSE_URL}/api/admin/settings/:DoiProvider" -X PUT -d FAKE
+
+API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq ".data.apiToken" | tr -d \")
+export API_TOKEN
+# ${ENV_OUT} comes from bootstrap.sh and will expose the saved information back to the host if enabled.
+echo "API_TOKEN=${API_TOKEN}" >> "${ENV_OUT}"
+
+echo "Publishing root dataverse..."
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST "${DATAVERSE_URL}/api/dataverses/:root/actions/:publish"
+
+echo "Allowing users to create dataverses and datasets in root..."
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"fullContributor\"}" "${DATAVERSE_URL}/api/dataverses/:root/assignments"
+
+echo "Checking Dataverse version..."
+curl "${DATAVERSE_URL}/api/info/version"
+
+echo ""
+echo "Done, your instance has been configured for development. Have a nice day!"
diff --git a/modules/container-configbaker/scripts/fix-fs-perms.sh b/modules/container-configbaker/scripts/fix-fs-perms.sh
new file mode 100644
index 00000000000..9ce8f475d70
--- /dev/null
+++ b/modules/container-configbaker/scripts/fix-fs-perms.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# [INFO]: Fix folder permissions using 'chown' to be writeable by containers not running as root.
+
+set -euo pipefail
+
+if [[ "$(id -un)" != "root" ]]; then
+  echo "This script must be run as user root (not $(id -un)), otherwise no fix is possible."
+fi
+
+DEF_DV_PATH="/dv"
+DEF_SOLR_PATH="/var/solr"
+DEF_DV_UID="1000"
+DEF_SOLR_UID="8983"
+
+function usage() {
+  echo "Usage: $(basename "$0") (dv|solr|[1-9][0-9]{3,4}) [PATH [PATH [...]]]"
+  echo ""
+  echo "You may omit a path when using 'dv' or 'solr' as first argument:"
+  echo "  - 'dv' will default to user $DEF_DV_UID and $DEF_DV_PATH"
+  echo "  - 'solr' will default to user $DEF_SOLR_UID and $DEF_SOLR_PATH"
+  exit 1
+}
+
+# Get a target name or id
+TARGET=${1:-help}
+# Get the rest of the arguments as paths to apply the fix to
+PATHS=( "${@:2}" )
+
+ID=0
+case "$TARGET" in
+  dv)
+    ID="$DEF_DV_UID"
+    # If there is no path, add the default for our app image
+    if [[ ${#PATHS[@]} -eq 0 ]]; then
+      PATHS=( "$DEF_DV_PATH" )
+    fi
+    ;;
+  solr)
+    ID="$DEF_SOLR_UID"
+    # In case there is no path, add the default path for Solr images
+    if [[ ${#PATHS[@]} -eq 0 ]]; then
+      PATHS=( "$DEF_SOLR_PATH" )
+    fi
+    ;;
+  # If there is a digit in the argument, check if this is a valid UID (>= 1000, ...)
+  *[[:digit:]]* )
+    echo "$TARGET" | grep -q "^[1-9][0-9]\{3,4\}$" || usage
+    ID="$TARGET"
+    ;;
+  *)
+    usage
+    ;;
+esac
+
+# Check that we actually have at least 1 path
+if [[ ${#PATHS[@]} -eq 0 ]]; then
+  usage
+fi
+
+# Do what we came for
+chown -R "$ID:$ID" "${PATHS[@]}"
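+
+# Usage sketches (paths are illustrative):
+#   fix-fs-perms.sh dv                # chown -R 1000:1000 /dv
+#   fix-fs-perms.sh solr              # chown -R 8983:8983 /var/solr
+#   fix-fs-perms.sh 1001 /some/path   # chown -R 1001:1001 /some/path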
diff --git a/modules/container-configbaker/scripts/help.sh b/modules/container-configbaker/scripts/help.sh
new file mode 100644
index 00000000000..744ec8c8b4c
--- /dev/null
+++ b/modules/container-configbaker/scripts/help.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+set -euo pipefail
+
+# [INFO]: This script.
+
+# This is the Dataverse logo in ASCII
+# shellcheck disable=SC2016
+echo -e '          ╓mαo\n         ╫   jh\n         `%╥æ╨\n           ╫µ\n          ╓@M%╗,\n         ▓`    ╫U\n         ▓²    ╫╛\n          ▓M#M╝"\n  ┌æM╝╝%φ╫┘\n┌╫"      "╫┐\n▓          ▓\n▓          ▓\n`╫µ      ¿╫"\n  "╜%%MM╜`'
+echo ""
+echo "Hello!"
+echo ""
+echo "My name is Config Baker. I'm a container image with lots of tooling to 'bake' a containerized Dataverse instance!"
+echo "I can cook up an instance (initial config), put icing on your Solr search index configuration, and more!"
+echo ""
+echo "Here's a list of things I can do for you:"
+
+# Get the longest name length
+LENGTH=1
+for SCRIPT in "${SCRIPT_DIR}"/*.sh; do
+  L="$(basename "$SCRIPT" | wc -m)"
+  if [ "$L" -gt "$LENGTH" ]; then
+    LENGTH="$L"
+  fi
+done
+
+# Print script names and info, but formatted
+for SCRIPT in "${SCRIPT_DIR}"/*.sh; do
+  printf "%${LENGTH}s - " "$(basename "$SCRIPT")"
+  grep "# \[INFO\]: " "$SCRIPT" | sed -e "s|# \[INFO\]: ||"
+done
+
+echo ""
+echo "Simply execute this container with the script name (and potentially arguments) as 'command'."
diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml
index d85d8aed5a1..7b305cad581 100644
--- a/modules/dataverse-parent/pom.xml
+++ b/modules/dataverse-parent/pom.xml
@@ -14,6 +14,7 @@
         <module>../../pom.xml</module>
         <module>../../scripts/zipdownload</module>
         <module>../container-base</module>
+        <module>../dataverse-spi</module>
     </modules>
     
     <!-- Transitive dependencies, bigger library "bill of materials" (BOM) and
@@ -130,9 +131,9 @@
  
     <properties>
         <!-- This is a special Maven property name, do not change! -->
-        <revision>5.13</revision>
+        <revision>6.1</revision>
     
-        <target.java.version>11</target.java.version>
+        <target.java.version>17</target.java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <additionalparam>-Xdoclint:none</additionalparam>
         <!-- Needed to avoid IDEA IDE compilation failures. See commits in GH #5059 -->
@@ -147,9 +148,9 @@
         <argLine>-Duser.timezone=${project.timezone} -Dfile.encoding=${project.build.sourceEncoding} -Duser.language=${project.language} -Duser.region=${project.region}</argLine>
     
         <!-- Major system components and dependencies -->
-        <payara.version>5.2022.3</payara.version>
-        <postgresql.version>42.5.1</postgresql.version>
-        <solr.version>8.11.1</solr.version>
+        <payara.version>6.2023.8</payara.version>
+        <postgresql.version>42.6.0</postgresql.version>
+        <solr.version>9.3.0</solr.version>
         <aws.version>1.12.290</aws.version>
         <google.cloud.version>0.177.0</google.cloud.version>
     
@@ -164,32 +165,41 @@
         <apache.httpcomponents.core.version>4.4.14</apache.httpcomponents.core.version>
         
         <!-- NEW gdcc XOAI library implementation -->
-        <gdcc.xoai.version>5.0.0</gdcc.xoai.version>
+        <gdcc.xoai.version>5.2.0</gdcc.xoai.version>
     
         <!-- Testing dependencies -->
-        <testcontainers.version>1.15.0</testcontainers.version>
+        <testcontainers.version>1.19.0</testcontainers.version>
         <smallrye-mpconfig.version>2.10.1</smallrye-mpconfig.version>
-    
-        <junit.version>4.13.1</junit.version>
-        <junit.jupiter.version>5.7.0</junit.jupiter.version>
-        <junit.vintage.version>${junit.jupiter.version}</junit.vintage.version>
-        <mockito.version>2.28.2</mockito.version>
+        <junit.jupiter.version>5.10.0</junit.jupiter.version>
+        <mockito.version>5.4.0</mockito.version>
+        <maven-jacoco-plugin.version>0.8.10</maven-jacoco-plugin.version>
         
         <checkstyle.version>9.3</checkstyle.version>
         
         <!-- Official Maven Plugins -->
-        <maven-compiler-plugin.version>3.8.1</maven-compiler-plugin.version>
-        <maven-jar-plugin.version>3.2.2</maven-jar-plugin.version>
+        <maven-compiler-plugin.version>3.11.0</maven-compiler-plugin.version>
+        <maven-jar-plugin.version>3.3.0</maven-jar-plugin.version>
         <maven-war-plugin.version>3.3.2</maven-war-plugin.version>
-        <maven-dependency-plugin.version>3.2.0</maven-dependency-plugin.version>
-        <maven-install-plugin.version>3.0.0-M1</maven-install-plugin.version>
-        <maven-surefire-plugin.version>3.0.0-M5</maven-surefire-plugin.version>
-        <maven-failsafe-plugin.version>3.0.0-M5</maven-failsafe-plugin.version>
-        <maven-assembly-plugin.version>3.3.0</maven-assembly-plugin.version>
-        <maven-checkstyle-plugin.version>3.1.2</maven-checkstyle-plugin.version>
+        <maven-dependency-plugin.version>3.5.0</maven-dependency-plugin.version>
+        <maven-install-plugin.version>3.1.1</maven-install-plugin.version>
+        <maven-surefire-plugin.version>3.1.0</maven-surefire-plugin.version>
+        <maven-failsafe-plugin.version>3.1.0</maven-failsafe-plugin.version>
+        <maven-assembly-plugin.version>3.6.0</maven-assembly-plugin.version>
+        <maven-resources-plugin.version>3.3.1</maven-resources-plugin.version>
+        <maven-release-plugin.version>3.0.0-M7</maven-release-plugin.version>
+        <maven-gpg-plugin.version>3.0.1</maven-gpg-plugin.version>
+        <maven-site-plugin.version>4.0.0-M4</maven-site-plugin.version>
+        <maven-source-plugin.version>3.2.1</maven-source-plugin.version>
+        <maven-javadoc-plugin.version>3.4.1</maven-javadoc-plugin.version>
+        <maven-flatten-plugin.version>1.3.0</maven-flatten-plugin.version>
+        <maven-enforcer-plugin.version>3.3.0</maven-enforcer-plugin.version>
+        
+        <maven-checkstyle-plugin.version>3.2.2</maven-checkstyle-plugin.version>
+        <nexus-staging-plugin.version>1.6.13</nexus-staging-plugin.version>
+        <pomchecker-maven-plugin.version>1.7.0</pomchecker-maven-plugin.version>
         
         <!-- Container related -->
-        <fabric8-dmp.version>0.40.2</fabric8-dmp.version>
+        <fabric8-dmp.version>0.43.4</fabric8-dmp.version>
     </properties>
     
     <pluginRepositories>
@@ -245,6 +255,16 @@
                     <artifactId>maven-failsafe-plugin</artifactId>
                     <version>${maven-failsafe-plugin.version}</version>
                 </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-resources-plugin</artifactId>
+                    <version>${maven-resources-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-enforcer-plugin</artifactId>
+                    <version>${maven-enforcer-plugin.version}</version>
+                </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-checkstyle-plugin</artifactId>
@@ -257,13 +277,99 @@
                         </dependency>
                     </dependencies>
                 </plugin>
+                <plugin>
+                    <groupId>org.jacoco</groupId>
+                    <artifactId>jacoco-maven-plugin</artifactId>
+                    <version>${maven-jacoco-plugin.version}</version>
+                </plugin>
                 <plugin>
                     <groupId>io.fabric8</groupId>
                     <artifactId>docker-maven-plugin</artifactId>
                     <version>${fabric8-dmp.version}</version>
                 </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-site-plugin</artifactId>
+                    <version>${maven-site-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-source-plugin</artifactId>
+                    <version>${maven-source-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-javadoc-plugin</artifactId>
+                    <version>${maven-javadoc-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-gpg-plugin</artifactId>
+                    <version>${maven-gpg-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.codehaus.mojo</groupId>
+                    <artifactId>flatten-maven-plugin</artifactId>
+                    <version>${maven-flatten-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.kordamp.maven</groupId>
+                    <artifactId>pomchecker-maven-plugin</artifactId>
+                    <version>${pomchecker-maven-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.sonatype.plugins</groupId>
+                    <artifactId>nexus-staging-maven-plugin</artifactId>
+                    <version>${nexus-staging-plugin.version}</version>
+                </plugin>
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-release-plugin</artifactId>
+                    <version>${maven-release-plugin.version}</version>
+                </plugin>
             </plugins>
         </pluginManagement>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>no-junit4</id>
+                        <phase>generate-test-resources</phase>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <configuration>
+                            <rules>
+                                <bannedDependencies>
+                                    <excludes>
+                                        <exclude>junit:junit:*:*:test</exclude>
+                                        <exclude>org.junit:junit:*:*:test</exclude>
+                                        <exclude>org.junit.vintage:*:*:*:test</exclude>
+                                    </excludes>
+                                </bannedDependencies>
+                            </rules>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>general-reqs</id>
+                        <goals>
+                            <goal>enforce</goal>
+                        </goals>
+                        <phase>initialize</phase>
+                        <configuration>
+                            <rules>
+                                <banDuplicatePomDependencyVersions/>
+                                <requireJavaVersion>
+                                    <version>[${target.java.version}.0,)</version>
+                                </requireJavaVersion>
+                            </rules>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
     </build>
     
     <!--Maven checks for dependencies from these repos in the order shown in the pom.xml
@@ -319,15 +425,21 @@
             <name>Unidata All</name>
             <url>https://artifacts.unidata.ucar.edu/repository/unidata-all/</url>
         </repository>
-        <repository>
-            <id>dvn.private</id>
-            <name>Local repository for hosting jars not available from network repositories.</name>
-            <url>file://${project.basedir}/local_lib</url>
-        </repository>
-        <!-- Uncomment when using snapshot releases from Maven Central 
+        <!-- Uncomment when using snapshot releases from Maven Central -->
+        <!--
         <repository>
             <id>oss-sonatype</id>
             <name>oss-sonatype</name>
+            <url>
+                https://oss.sonatype.org/content/repositories/snapshots/
+            </url>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+        </repository>
+        <repository>
+            <id>s01-oss-sonatype</id>
+            <name>s01-oss-sonatype</name>
             <url>
                 https://s01.oss.sonatype.org/content/repositories/snapshots/
             </url>
@@ -343,10 +455,9 @@
             <id>ct</id>
             <properties>
                 <!--
-                    Payara 5.2022.3 has problems with postboot deployment scripts.
-                    Fixed in this release, see https://github.com/payara/Payara/pull/5991
+                    With moving to Payara 6, we are aligned with containers and classic install again.
                 -->
-                <payara.version>5.2022.4</payara.version>
+                <!-- <payara.version>5.2022.5</payara.version> -->
             </properties>
     
             <build>
diff --git a/modules/dataverse-spi/.gitignore b/modules/dataverse-spi/.gitignore
new file mode 100644
index 00000000000..d75620abf70
--- /dev/null
+++ b/modules/dataverse-spi/.gitignore
@@ -0,0 +1 @@
+.flattened-pom.xml
diff --git a/modules/dataverse-spi/pom.xml b/modules/dataverse-spi/pom.xml
new file mode 100644
index 00000000000..b00053fe5e0
--- /dev/null
+++ b/modules/dataverse-spi/pom.xml
@@ -0,0 +1,238 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    
+    <parent>
+        <groupId>edu.harvard.iq</groupId>
+        <artifactId>dataverse-parent</artifactId>
+        <version>${revision}</version>
+        <relativePath>../dataverse-parent</relativePath>
+    </parent>
+    
+    <groupId>io.gdcc</groupId>
+    <artifactId>dataverse-spi</artifactId>
+    <version>2.0.0${project.version.suffix}</version>
+    <packaging>jar</packaging>
+    
+    <name>Dataverse SPI Plugin API</name>
+    <url>https://dataverse.org</url>
+    <description>
+        A package to create out-of-tree Java code for Dataverse Software. Plugin projects can use this package
+        as an API dependency just like Jakarta EE APIs if they want to create external plugins. These will be loaded
+        at runtime of a Dataverse installation using SPI. See also https://guides.dataverse.org/en/latest/developers
+        for more information.
+    </description>
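+    <!--
+        Sketch of how an external plugin project might consume this artifact (the 'provided' scope
+        is an assumption):
+        <dependency>
+            <groupId>io.gdcc</groupId>
+            <artifactId>dataverse-spi</artifactId>
+            <version>2.0.0</version>
+            <scope>provided</scope>
+        </dependency>
+    -->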
+    
+    <licenses>
+        <license>
+            <name>Apache-2.0</name>
+            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+    
+    <developers>
+        <developer>
+            <name>Dataverse Core Team</name>
+            <email>support@dataverse.org</email>
+        </developer>
+    </developers>
+    
+    <issueManagement>
+        <url>https://github.com/IQSS/dataverse/issues</url>
+        <system>GitHub Issues</system>
+    </issueManagement>
+    
+    <scm>
+        <connection>scm:git:git@github.com:IQSS/dataverse.git</connection>
+        <developerConnection>scm:git:git@github.com:IQSS/dataverse.git</developerConnection>
+        <url>git@github.com:IQSS/dataverse.git</url>
+        <tag>HEAD</tag>
+    </scm>
+    
+    <ciManagement>
+        <url>https://github.com/IQSS/dataverse/actions</url>
+        <system>github</system>
+        <notifiers>
+            <notifier>
+                <address>dataversebot@gdcc.io</address>
+            </notifier>
+        </notifiers>
+    </ciManagement>
+    
+    <distributionManagement>
+        <snapshotRepository>
+            <id>ossrh</id>
+            <url>https://s01.oss.sonatype.org/content/repositories/snapshots</url>
+        </snapshotRepository>
+        <repository>
+            <id>ossrh</id>
+            <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/</url>
+        </repository>
+    </distributionManagement>
+    
+    <properties>
+        <!-- This property may be used to append a string to the version number from command line -->
+        <project.version.suffix></project.version.suffix>
+        <javadoc.lint>none</javadoc.lint>
+        <skipDeploy>false</skipDeploy>
+    </properties>
+    
+    <dependencies>
+        <dependency>
+            <groupId>jakarta.json</groupId>
+            <artifactId>jakarta.json-api</artifactId>
+            <scope>provided</scope>
+            <!-- no version here as managed by parent -->
+        </dependency>
+        <dependency>
+            <groupId>jakarta.ws.rs</groupId>
+            <artifactId>jakarta.ws.rs-api</artifactId>
+            <scope>provided</scope>
+            <!-- no version here as managed by parent -->
+        </dependency>
+    </dependencies>
+    
+    <build>
+        <plugins>
+            <plugin>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <release>${target.java.version}</release>
+                </configuration>
+            </plugin>
+            
+            <!-- RELEASING -->
+            <plugin>
+                <groupId>org.sonatype.plugins</groupId>
+                <artifactId>nexus-staging-maven-plugin</artifactId>
+                <extensions>true</extensions>
+                <configuration>
+                    <serverId>ossrh</serverId>
+                    <nexusUrl>https://s01.oss.sonatype.org</nexusUrl>
+                    <autoReleaseAfterClose>true</autoReleaseAfterClose>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-release-plugin</artifactId>
+                <configuration>
+                    <useReleaseProfile>false</useReleaseProfile>
+                    <releaseProfiles>release</releaseProfiles>
+                    <autoVersionSubmodules>true</autoVersionSubmodules>
+                    <goals>deploy</goals>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>flatten-maven-plugin</artifactId>
+                <configuration>
+                    <updatePomFile>true</updatePomFile>
+                    <flattenMode>oss</flattenMode>
+                    <pomElements>
+                        <distributionManagement>remove</distributionManagement>
+                        <repositories>remove</repositories>
+                    </pomElements>
+                </configuration>
+                <executions>
+                    <!-- enable flattening -->
+                    <execution>
+                        <id>flatten</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>flatten</goal>
+                        </goals>
+                    </execution>
+                    <!-- ensure proper cleanup -->
+                    <execution>
+                        <id>flatten.clean</id>
+                        <phase>clean</phase>
+                        <goals>
+                            <goal>clean</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+                <configuration>
+                    <skip>${skipDeploy}</skip>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+    
+    <profiles>
+        <profile>
+            <id>release</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-gpg-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>sign-artifacts</id>
+                                <phase>verify</phase>
+                                <goals>
+                                    <goal>sign</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <groupId>org.kordamp.maven</groupId>
+                        <artifactId>pomchecker-maven-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <phase>process-resources</phase>
+                                <goals>
+                                    <goal>check-maven-central</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-javadoc-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>attach-javadocs</id>
+                                <goals>
+                                    <goal>jar</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            <source>${target.java.version}</source>
+                            <detectJavaApiLink>false</detectJavaApiLink>
+                            <doclint>${javadoc.lint}</doclint>
+                        </configuration>
+                    </plugin>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-source-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>attach-sources</id>
+                                <goals>
+                                    <goal>jar</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <profile>
+            <id>ct</id>
+            <properties>
+                <skipDeploy>true</skipDeploy>
+            </properties>
+        </profile>
+    </profiles>
+</project>
diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
new file mode 100644
index 00000000000..d039ac39e8f
--- /dev/null
+++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
@@ -0,0 +1,96 @@
+package io.gdcc.spi.export;
+
+import java.io.InputStream;
+import java.util.Optional;
+
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+
+/**
+ * Provides all the metadata Dataverse has about a given dataset that can then
+ * be used by an {@link Exporter} to create a new metadata export format.
+ * 
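+ * <p>
+ * For illustration only (which method an Exporter uses depends on the target
+ * format), a typical Exporter will pick one of the 'complete' formats noted
+ * below as its starting point:
+ * <pre>{@code
+ * JsonObject datasetJson = dataProvider.getDatasetJson();
+ * JsonObject oreMap = dataProvider.getDatasetORE();
+ * }</pre>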
+ */
+public interface ExportDataProvider {
+
+    /**
+     * @return - dataset metadata in the standard Dataverse JSON format used in the
+     *         API and available as the JSON metadata export via the user interface.
+     * @apiNote - there is no JSON schema defining this output, but the format is
+     *          well documented in the Dataverse online guides. This and the
+     *          OAI_ORE export are the only two that provide 'complete'
+     *          dataset-level metadata along with basic file metadata for each file
+     *          in the dataset.
+     */
+    JsonObject getDatasetJson();
+
+    /**
+     * 
+     * @return - dataset metadata in the JSON-LD based OAI_ORE format used in
+     *         Dataverse's archival bag export mechanism and as available in the
+     *         user interface and by API.
+     * @apiNote - This and the JSON format are the only two that provide complete
+     *          dataset-level metadata along with basic file metadata for each file
+     *          in the dataset.
+     */
+    JsonObject getDatasetORE();
+
+    /**
+     * Dataverse is capable of extracting DDI-centric metadata from tabular
+     * datafiles. This detailed metadata, which is only available for successfully
+     * "ingested" tabular files, is not included in the output of any other methods
+     * in this interface.
+     * 
+     * @return - a JsonArray with one entry per ingested tabular dataset file.
+     * @apiNote - there is no JSON schema available for this output and the format
+     *          is not well documented. Implementers may wish to explore the
+     *          {@code edu.harvard.iq.dataverse.export.DDIExporter} and
+     *          {@code edu.harvard.iq.dataverse.util.json.JSONPrinter} classes where
+     *          this output is used and generated, respectively.
+     */
+    JsonArray getDatasetFileDetails();
+
+    /**
+     * 
+     * @return - the subset of metadata conforming to the schema.org standard as
+     *         available in the user interface and as included as header metadata in
+     *         dataset pages (for use by search engines)
+     * @apiNote - as this metadata export is not complete, it should only be used as
+     *          a starting point for an Exporter if it simplifies your exporter
+     *          relative to using the JSON or OAI_ORE exports.
+     */
+    JsonObject getDatasetSchemaDotOrg();
+
+    /**
+     * 
+     * @return - the subset of metadata conforming to the DataCite standard as
+     *         available in the Dataverse user interface and as sent to DataCite when DataCite DOIs are used.
+     * @apiNote - as this metadata export is not complete, it should only be used as
+     *          a starting point for an Exporter if it simplifies your exporter
+     *          relative to using the JSON or OAI_ORE exports.
+     */
+    String getDataCiteXml();
+
+    /**
+     * If an Exporter has specified a prerequisite format name via the
+     * getPrerequisiteFormatName() method, it can call this method to retrieve
+     * metadata in that format.
+     * 
+     * @return - metadata in the specified prerequisite format (if available from
+     *         another internal or added Exporter) as an {@code Optional<InputStream>}
+     * @apiNote - This functionality is intended as a way to easily generate alternate
+     *          formats of essentially the same metadata, e.g. to support download as
+     *          XML, HTML, or PDF for a specific metadata standard (e.g. DDI). It can be
+     *          particularly useful, relative to starting from the output of one of
+     *          the getDataset* methods above, if there are existing libraries that
+     *          can convert between these formats. Note that, since Exporters can be
+     *          replaced, relying on this method could cause your Exporter to
+     *          malfunction, e.g. if you depend on format "ddi" and a third party
+     *          Exporter is configured to replace the internal ddi Exporter in
+     *          Dataverse.
+     */
+    default Optional<InputStream> getPrerequisiteInputStream() {
+        return Optional.empty();
+    }
+
+}
diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportException.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportException.java
new file mode 100644
index 00000000000..c816a605860
--- /dev/null
+++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/ExportException.java
@@ -0,0 +1,13 @@
+package io.gdcc.spi.export;
+
+import java.io.IOException;
+
+public class ExportException extends IOException {
+    public ExportException(String message) {
+        super(message);
+    }
+
+    public ExportException(String message, Throwable cause) {
+        super(message, cause);
+    }
+}
diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java
new file mode 100644
index 00000000000..1338a3c9734
--- /dev/null
+++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/Exporter.java
@@ -0,0 +1,110 @@
+package io.gdcc.spi.export;
+
+import java.io.OutputStream;
+import java.util.Locale;
+import java.util.Optional;
+
+
+/**
+ * Dataverse allows new metadata export formats to be dynamically added to a running instance. This is done by
+ * deploying new classes that implement this Exporter interface.
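+ * <p>
+ * A minimal sketch of an implementation (imports omitted; the class name, format
+ * name, and display name below are purely illustrative, and how the class is
+ * packaged and registered as a service provider is left to the deployment):
+ * <pre>{@code
+ * public class MyJsonExporter implements Exporter {
+ *     public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream)
+ *             throws ExportException {
+ *         try {
+ *             // Write the standard Dataverse JSON metadata as-is.
+ *             outputStream.write(dataProvider.getDatasetJson().toString()
+ *                     .getBytes(java.nio.charset.StandardCharsets.UTF_8));
+ *             outputStream.flush(); // flush, but do not close (see exportDataset)
+ *         } catch (java.io.IOException e) {
+ *             throw new ExportException("my_json export failed", e);
+ *         }
+ *     }
+ *     public String getFormatName() { return "my_json"; }
+ *     public String getDisplayName(Locale locale) { return "My JSON"; }
+ *     public Boolean isHarvestable() { return false; }
+ *     public Boolean isAvailableToUsers() { return true; }
+ *     public String getMediaType() { return "application/json"; }
+ * }
+ * }</pre>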
+ */
+
+public interface Exporter {
+
+
+    /**
+     * When this method is called, the Exporter should write the metadata to the given OutputStream.
+     * 
+     * @apiNote When implementing exportDataset, once done writing content, please make sure
+     * to flush() the outputStream, but NOT close() it! This way an exporter can be
+     * used to insert the produced metadata into the body of an HTTP response, etc.
+     * (for example, to insert it into the body of an OAI response, where more XML
+     * needs to be written, for the outer OAI-PMH record). -- L.A. 4.5
+     * 
+     * @param dataProvider - the {@link ExportDataProvider} interface includes several methods that can be used to retrieve the dataset metadata in different formats. An Exporter should use one or more of these to obtain the values needed to generate metadata in the format it supports.
+     * @param outputStream - the OutputStream to write the metadata to
+     * @throws ExportException - if there is an error writing the metadata
+     */
+    void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException;
+
+    /**
+     * This method should return the name of the metadata format this Exporter
+     * provides.
+     * 
+     * @apiNote Format names are unique identifiers for the formats supported in
+     *          Dataverse. Reusing the same format name as another Exporter will
+     *          result in only one implementation being available. Exporters packaged
+     *          as an external Jar file have precedence over the default
+     *          implementations in Dataverse. Hence re-using one of the existing
+     *          format names will result in the Exporter replacing the internal one
+     *          with the same name. The precedence between two external Exporters
+     *          using the same format name is not defined.
+     *          Current format names used internally by Dataverse are:
+     *          Datacite
+     *          dcterms
+     *          ddi
+     *          oai_dc
+     *          html
+     *          dataverse_json
+     *          oai_ddi
+     *          OAI_ORE
+     *          oai_datacite
+     *          schema.org
+     *          
+     * @return - the unique name of the metadata format this Exporter provides
+     */
+    String getFormatName();
+
+    /**
+     * This method should return the display name of the metadata format this
+     * Exporter provides. Display names are used in the UI, specifically in the menu
+     * of available Metadata Exports on the dataset page/metadata tab to identify the
+     * format.
+     */
+    String getDisplayName(Locale locale);
+
+    /**
+     * Exporters can specify that they require, as input, the output of another
+     * exporter. This is done by providing the name of that format in response to a
+     * call to this method.
+     * 
+     * @implNote The one current example where this is done is the html (display
+     *           name "DDI html codebook") exporter, which starts from the XML-based
+     *           output of the ddi exporter.
+     * @apiNote - The Exporter can expect that the metadata produced by its
+     *          prerequisite exporter (as defined with this method) will be
+     *          available via the ExportDataProvider.getPrerequisiteInputStream()
+     *          method. The default implementation of this method returns an empty
+     *          value, which means the getPrerequisiteInputStream() method of the
+     *          ExportDataProvider passed to the exportDataset method will return an
+     *          empty {@code Optional<InputStream>}.
+     * 
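+     * <p>
+     * For illustration, an Exporter that wants to post-process the internal ddi
+     * output (a hypothetical choice, subject to the replacement caveat above)
+     * could simply override:
+     * <pre>{@code
+     * public Optional<String> getPrerequisiteFormatName() {
+     *     return Optional.of("ddi");
+     * }
+     * }</pre>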
+     */
+    default Optional<String> getPrerequisiteFormatName() {
+        return Optional.empty();
+    }
+
+
+    /**
+     * Harvestable Exporters will be available as options in Dataverse's Harvesting mechanism.
+     * @return true to make this exporter available as a harvesting option.
+     */
+    Boolean isHarvestable();
+
+    /**
+     * If an Exporter is available to users, its format will be generated for every
+     * published dataset and made available via the dataset page/metadata
+     * tab/Metadata Exports menu item and via the API.
+     * @return true to make this exporter available to users.
+     */
+    Boolean isAvailableToUsers();
+
+    /**
+     * To support effective downloads of metadata in this Exporter's format, the Exporter should specify an appropriate MIME type.
+     * @apiNote - It is recommended to use the {@link jakarta.ws.rs.core.MediaType} constants to specify the MIME type.
+     * @return The MIME type, e.g. "application/json", "text/plain", etc.
+     */
+    String getMediaType();
+
+}
diff --git a/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java
new file mode 100644
index 00000000000..3c3fa35c69d
--- /dev/null
+++ b/modules/dataverse-spi/src/main/java/io/gdcc/spi/export/XMLExporter.java
@@ -0,0 +1,37 @@
+package io.gdcc.spi.export;
+
+import jakarta.ws.rs.core.MediaType;
+
+/**
+ * XMLExporter is an extension of the base Exporter interface that adds the
+ * methods needed for generating XML metadata export formats.
+ */
+public interface XMLExporter extends Exporter {
+
+    /**
+     * @implNote for the ddi exporter, this method returns "ddi:codebook:2_5"
+     * @return - the namespace of the XML schema
+     */
+    String getXMLNameSpace();
+
+    /**
+     * @apiNote According to the XML specification, the value must be a URI
+     * @implNote for the ddi exporter, this method returns
+     *           "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd"
+     * @return - the location of the XML schema as a String (must be a valid URI)
+     */
+    String getXMLSchemaLocation();
+
+    /**
+     * @implNote for the ddi exporter, this method returns "2.5"
+     * @return - the version of the XML schema
+     */
+    String getXMLSchemaVersion();
+
+    /**
+     * @return - should always be MediaType.APPLICATION_XML
+     */
+    default String getMediaType() {
+        return MediaType.APPLICATION_XML;
+    }
+}
diff --git a/modules/nginx/Dockerfile b/modules/nginx/Dockerfile
new file mode 100644
index 00000000000..3900076599f
--- /dev/null
+++ b/modules/nginx/Dockerfile
@@ -0,0 +1,9 @@
+FROM nginx:latest
+
+# Remove the default NGINX configuration file
+RUN rm /etc/nginx/conf.d/default.conf
+
+# Copy the contents of the local default.conf to the container
+COPY default.conf /etc/nginx/conf.d/
+
+EXPOSE 4849
\ No newline at end of file
diff --git a/modules/nginx/README.md b/modules/nginx/README.md
new file mode 100644
index 00000000000..9d2ff785577
--- /dev/null
+++ b/modules/nginx/README.md
@@ -0,0 +1,7 @@
+# nginx proxy
+
+nginx can be used to proxy various services running in Docker at other ports or via other protocols.
+
+Currently, this is used to work around a problem with the IntelliJ Payara plugin, which does not allow remote redeployment when the Payara admin console is served via HTTPS with a self-signed certificate, as is the case in the default Dataverse container installation. This configuration provides an HTTP endpoint on port 4849 and proxies requests to the Payara admin console's HTTPS endpoint on port 4848. In the IntelliJ Payara plugin, specify localhost and port 4849 (without SSL).
+
+![img.png](img.png)
diff --git a/modules/nginx/default.conf b/modules/nginx/default.conf
new file mode 100644
index 00000000000..8381a66c19a
--- /dev/null
+++ b/modules/nginx/default.conf
@@ -0,0 +1,12 @@
+server {
+    listen 4849;
+
+    # Make it big, so that .war files can be submitted
+    client_max_body_size 300M;
+
+    location / {
+        proxy_pass https://dataverse:4848;
+        proxy_ssl_verify off;
+        proxy_ssl_server_name on;
+    }
+}
diff --git a/pom.xml b/pom.xml
index 8b6f98c5896..34b0ad2e835 100644
--- a/pom.xml
+++ b/pom.xml
@@ -15,14 +15,20 @@
     doc/sphinx-guides/source/developers/dependencies.rst
     -->
     <artifactId>dataverse</artifactId>
-    <packaging>war</packaging>
+    <packaging>${packaging.type}</packaging>
     <name>dataverse</name>
     <properties>
         <skipUnitTests>false</skipUnitTests>
+        <skipIntegrationTests>false</skipIntegrationTests>
+        <it.groups>integration</it.groups>
+        
+        <!-- By default, this module will produce a WAR file. -->
+        <!-- This will be switched within the container profile! -->
+        <packaging.type>war</packaging.type>
+        
         <reload4j.version>1.2.18.4</reload4j.version>
-        <flyway.version>8.5.10</flyway.version>
+        <flyway.version>9.21.2</flyway.version>
         <jhove.version>1.20.1</jhove.version>
-        <jacoco.version>0.8.7</jacoco.version>
         <poi.version>5.2.1</poi.version>
         <tika.version>2.4.1</tika.version>
         <netcdf.version>5.5.3</netcdf.version>
@@ -63,7 +69,7 @@
             <scope>runtime</scope>
         </dependency>
         
-	    <dependency>
+        <dependency>
             <groupId>org.passay</groupId>
             <artifactId>passay</artifactId>
             <version>1.6.0</version>
@@ -90,7 +96,7 @@
         <dependency>
             <groupId>io.gdcc</groupId>
             <artifactId>sword2-server</artifactId>
-            <version>1.2.1</version>
+            <version>2.0.0</version>
         </dependency>
         <!-- Dependency to use sword2-server in our codebase -->
         <dependency>
@@ -113,7 +119,7 @@
         <dependency>
           <groupId>com.apicatalog</groupId>
           <artifactId>titanium-json-ld</artifactId>
-          <version>1.3.0-SNAPSHOT</version>
+          <version>1.3.2</version>
         </dependency>
         <dependency>
             <groupId>com.google.code.gson</groupId>
@@ -154,12 +160,20 @@
             <artifactId>flyway-core</artifactId>
             <version>${flyway.version}</version>
         </dependency>
+        <!-- Enable resolution of the JPA provider in persistence.xml -->
+        <dependency>
+            <groupId>org.eclipse.persistence</groupId>
+            <artifactId>org.eclipse.persistence.jpa</artifactId>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>29.0-jre</version>
+            <version>32.1.2-jre</version>
             <type>jar</type>
         </dependency>
+        
+        <!-- Jakarta EE & Eclipse MicroProfile base dependencies -->
         <dependency>
             <groupId>org.eclipse.microprofile.config</groupId>
             <artifactId>microprofile-config-api</artifactId>
@@ -168,21 +182,35 @@
         <dependency>
             <groupId>jakarta.platform</groupId>
             <artifactId>jakarta.jakartaee-api</artifactId>
-            <version>${jakartaee-api.version}</version>
             <scope>provided</scope>
         </dependency>
-        <!-- JSON-P -->
+        
+        <!-- Jakarta Activation, MIME support etc -->
+        <!-- Runtime implementation here only, as necessary for testing. -->
         <dependency>
-            <groupId>org.glassfish</groupId>
-            <artifactId>jakarta.json</artifactId>
+            <groupId>org.eclipse.angus</groupId>
+            <artifactId>angus-activation</artifactId>
             <scope>provided</scope>
             <!-- no version here as managed by Payara BOM above! -->
         </dependency>
         <dependency>
-            <groupId>com.sun.mail</groupId>
-            <artifactId>jakarta.mail</artifactId>
+            <groupId>fish.payara.api</groupId>
+            <artifactId>payara-api</artifactId>
             <scope>provided</scope>
+            <!-- Note: The version was provided by the Payara BOM until 6.2023.7, when the Core BOM was removed from it
+                       as it is meant for internal use only. Simply referencing the version property here solves the problem. -->
+            <version>${payara.version}</version>
         </dependency>
+        
+        <!-- JSON-P -->
+        <!-- Runtime implementation here only, as necessary for testing. -->
+        <dependency>
+            <groupId>org.eclipse.parsson</groupId>
+            <artifactId>jakarta.json</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        
+        <!-- Jakarta Faces & related -->
         <dependency>
             <groupId>org.glassfish</groupId>
             <artifactId>jakarta.faces</artifactId>
@@ -192,6 +220,7 @@
             <groupId>org.primefaces</groupId>
             <artifactId>primefaces</artifactId>
             <version>11.0.0</version>
+            <classifier>jakarta</classifier>
         </dependency>
         <dependency>
             <groupId>org.primefaces.themes</groupId>
@@ -201,9 +230,10 @@
         <dependency>
             <groupId>org.omnifaces</groupId>
             <artifactId>omnifaces</artifactId>
-            <version>3.8</version> <!-- Or 1.8-SNAPSHOT -->
+            <version>4.0-M13</version>
         </dependency>
         
+        <!-- Jakarta Validation API & runtime -->
         <dependency>
             <groupId>jakarta.validation</groupId>
             <artifactId>jakarta.validation-api</artifactId>
@@ -214,9 +244,12 @@
             <artifactId>hibernate-validator</artifactId>
             <scope>provided</scope>
         </dependency>
+    
+        <!-- Jakarta Expression Language -->
+        <!-- Runtime implementation here only, as necessary for testing. -->
         <dependency>
-            <groupId>org.glassfish</groupId>
-            <artifactId>jakarta.el</artifactId>
+            <groupId>org.glassfish.expressly</groupId>
+            <artifactId>expressly</artifactId>
             <scope>provided</scope>
         </dependency>
         
@@ -250,29 +283,23 @@
         <dependency>
             <groupId>org.apache.solr</groupId>
             <artifactId>solr-solrj</artifactId>
-            <version>8.11.1</version>
+            <version>9.3.0</version>
         </dependency>
         <dependency>
             <groupId>colt</groupId>
             <artifactId>colt</artifactId>
             <version>1.2.0</version>
         </dependency>
-        <!-- fits.jar, not available from network repos, supplied in local_lib -->
+        <!-- FITS -->
         <dependency>
-            <groupId>nom.tam.fits</groupId>
-            <artifactId>fits</artifactId>
-            <version>2012-10-25-generated</version>
+            <groupId>gov.nasa.gsfc.heasarc</groupId>
+            <artifactId>nom-tam-fits</artifactId>
+            <version>1.12.0</version>
         </dependency>
         <dependency>
             <groupId>net.handle</groupId>
-            <artifactId>handle</artifactId>
-            <version>8.1.1</version>
-        </dependency>
-        <!-- UNF v5 (buggy), (temporarily) added for testing ingest against DVN v3  - L.A. -->
-        <dependency>
-            <groupId>edu.harvard.iq.dvn</groupId>
-            <artifactId>unf5</artifactId>
-            <version>5.0</version>
+            <artifactId>handle-client</artifactId>
+            <version>9.3.1</version>
         </dependency>
         <!-- (new) UNF v6: -->
         <dependency>
@@ -332,18 +359,24 @@
         <dependency>
             <groupId>org.ocpsoft.rewrite</groupId>
             <artifactId>rewrite-servlet</artifactId>
-            <version>3.5.0.Final</version>
+            <version>10.0.0.Final</version>
         </dependency>
         <dependency>
             <groupId>org.ocpsoft.rewrite</groupId>
             <artifactId>rewrite-config-prettyfaces</artifactId>
-            <version>3.5.0.Final</version>
+            <version>10.0.0.Final</version>
         </dependency>
         <dependency>
             <groupId>edu.ucsb.nceas</groupId>
             <artifactId>ezid</artifactId>
             <version>1.0.0</version>
             <type>jar</type>
+            <exclusions>
+                <exclusion>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.jsoup</groupId>
@@ -381,9 +414,16 @@
         <dependency>
             <groupId>com.nimbusds</groupId>
             <artifactId>oauth2-oidc-sdk</artifactId>
-            <version>9.41.1</version>
+            <version>10.13.2</version>
         </dependency>
-        <!-- New and Improved GDCC XOAI library! --> 
+        <!-- Caching library, current main use case is for OIDC authentication -->
+        <dependency>
+            <groupId>com.github.ben-manes.caffeine</groupId>
+            <artifactId>caffeine</artifactId>
+            <version>3.1.8</version>
+        </dependency>
+        
+        <!-- New and Improved GDCC XOAI library! -->
         <dependency>
             <groupId>io.gdcc</groupId>
             <artifactId>xoai-data-provider</artifactId>
@@ -404,15 +444,13 @@
         </dependency>
         <!-- For API File Upload: 1 of 2 -->
         <dependency>
-            <groupId>org.glassfish.jersey.containers</groupId>
-            <artifactId>jersey-container-servlet</artifactId>
-            <version>2.23.2</version>
+            <groupId>org.glassfish.jersey.core</groupId>
+            <artifactId>jersey-server</artifactId>
         </dependency>
         <!-- For API File Upload: 2 of 2 -->
         <dependency>
             <groupId>org.glassfish.jersey.media</groupId>
             <artifactId>jersey-media-multipart</artifactId>
-            <version>2.23.2</version>
         </dependency>
         <dependency>
             <groupId>com.mashape.unirest</groupId>
@@ -499,7 +537,11 @@
             <artifactId>cdm-core</artifactId>
             <version>${netcdf.version}</version>
         </dependency>
-
+        <dependency>
+            <groupId>io.gdcc</groupId>
+            <artifactId>dataverse-spi</artifactId>
+            <version>2.0.0</version>
+        </dependency>
         <!-- TESTING DEPENDENCIES -->
         <dependency>
             <groupId>org.junit.jupiter</groupId>
@@ -507,18 +549,6 @@
             <version>${junit.jupiter.version}</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>${junit.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.vintage</groupId>
-            <artifactId>junit-vintage-engine</artifactId>
-            <version>${junit.vintage.version}</version>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.hamcrest</groupId>
             <artifactId>hamcrest-library</artifactId>
@@ -538,9 +568,9 @@
             <scope>test</scope>
         </dependency>
         <dependency>
-            <groupId>com.jayway.restassured</groupId>
+            <groupId>io.rest-assured</groupId>
             <artifactId>rest-assured</artifactId>
-            <version>2.4.0</version>
+            <version>5.3.1</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -559,6 +589,12 @@
             <groupId>org.testcontainers</groupId>
             <artifactId>testcontainers</artifactId>
             <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                </exclusion>
+            </exclusions>
         </dependency>
         <dependency>
             <groupId>org.testcontainers</groupId>
@@ -570,6 +606,29 @@
             <artifactId>postgresql</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>com.github.dasniko</groupId>
+            <artifactId>testcontainers-keycloak</artifactId>
+            <version>3.0.0</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>localstack</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <!--
+            Brute force solution until we are on Jakarta EE 10.
+            Otherwise, we get very cryptic errors about missing bundle files on test runs.
+            See also https://github.com/jakartaee/jakartaee-api/issues/61
+        -->
+        <dependency>
+            <groupId>jakarta.servlet</groupId>
+            <artifactId>jakarta.servlet-api</artifactId>
+            <version>4.0.4</version>
+            <scope>test</scope>
+        </dependency>
+        
         <dependency>
             <groupId>org.mockito</groupId>
             <artifactId>mockito-core</artifactId>
@@ -588,24 +647,23 @@
             <version>${smallrye-mpconfig.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.htmlunit</groupId>
+            <artifactId>htmlunit</artifactId>
+            <version>3.2.0</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
     <build>
-        <!--        <testResources>
-            <testResource>
-                <directory>${project.basedir}/src/main/resources</directory>
-            </testResource>
-        </testResources>-->
-        <!--        <testResources>
+        <testResources>
             <testResource>
-              <directory>${project.basedir}/src/test/java</directory>
-              <excludes>
-                  <exclude>**/*.java</exclude>
-              </excludes>
+                <directory>${project.basedir}/src/test/resources</directory>
             </testResource>
             <testResource>
-              <directory>${project.basedir}/src/test/resources</directory>
+                <directory>${project.basedir}/conf/keycloak</directory>
+                <targetPath>keycloak</targetPath>
             </testResource>
-          </testResources>-->
+        </testResources>
         <resources>
             <resource>
                 <directory>src/main/java</directory>
@@ -683,24 +741,94 @@
             <plugin>
                 <groupId>org.jacoco</groupId>
                 <artifactId>jacoco-maven-plugin</artifactId>
-                <version>${jacoco.version}</version>
-                <configuration>
-                    <destfile>${basedir}/target/coverage-reports/jacoco-unit.exec</destfile>
-                    <datafile>${basedir}/target/coverage-reports/jacoco-unit.exec</datafile>
-                </configuration>
                 <executions>
                     <execution>
                         <id>jacoco-initialize</id>
                         <goals>
                             <goal>prepare-agent</goal>
                         </goals>
+                        <configuration>
+                            <destFile>${project.build.directory}/coverage-reports/jacoco-unit.exec</destFile>
+                            <propertyName>surefire.jacoco.args</propertyName>
+                            <skip>${skipUnitTests}</skip>
+                            <includes>
+                                <include>edu/harvard/iq/dataverse/*</include>
+                                <include>io/gdcc/*</include>
+                                <include>org/dataverse/*</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>jacoco-after-unit</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>report</goal>
+                        </goals>
+                        <configuration>
+                            <dataFile>${project.build.directory}/coverage-reports/jacoco-unit.exec</dataFile>
+                            <outputDirectory>${project.reporting.outputDirectory}/jacoco-unit-test-coverage-report</outputDirectory>
+                            <skip>${skipUnitTests}</skip>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>jacoco-initialize-it</id>
+                        <phase>pre-integration-test</phase>
+                        <goals>
+                            <goal>prepare-agent</goal>
+                        </goals>
+                        <configuration>
+                            <destFile>${project.build.directory}/coverage-reports/jacoco-integration.exec</destFile>
+                            <propertyName>failsafe.jacoco.args</propertyName>
+                            <skip>${skipIntegrationTests}</skip>
+                            <includes>
+                                <include>edu/harvard/iq/dataverse/*</include>
+                                <include>io/gdcc/*</include>
+                                <include>org/dataverse/*</include>
+                            </includes>
+                        </configuration>
                     </execution>
                     <execution>
-                        <id>jacoco-site</id>
-                        <phase>package</phase>
+                        <id>jacoco-after-it</id>
+                        <phase>post-integration-test</phase>
                         <goals>
                             <goal>report</goal>
                         </goals>
+                        <configuration>
+                            <dataFile>${project.build.directory}/coverage-reports/jacoco-integration.exec</dataFile>
+                            <outputDirectory>${project.reporting.outputDirectory}/jacoco-integration-test-coverage-report</outputDirectory>
+                            <skip>${skipIntegrationTests}</skip>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>jacoco-merge-unit-and-it</id>
+                        <phase>post-integration-test</phase>
+                        <goals>
+                            <goal>merge</goal>
+                        </goals>
+                        <configuration>
+                            <fileSets>
+                                <fileSet>
+                                    <directory>${project.build.directory}/coverage-reports/</directory>
+                                    <includes>
+                                        <include>*.exec</include>
+                                    </includes>
+                                </fileSet>
+                            </fileSets>
+                            <destFile>${project.build.directory}/coverage-reports/merged.exec</destFile>
+                            <skip>${skipIntegrationTests}</skip>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>jacoco-report</id>
+                        <phase>post-integration-test</phase>
+                        <goals>
+                            <goal>report</goal>
+                        </goals>
+                        <configuration>
+                            <dataFile>${project.build.directory}/coverage-reports/merged.exec</dataFile>
+                            <outputDirectory>${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report</outputDirectory>
+                            <skip>${skipIntegrationTests}</skip>
+                        </configuration>
                     </execution>
                 </executions>
             </plugin>
@@ -715,6 +843,9 @@
                         <version>2.3.1</version>
                     </dependency>
                 </dependencies>
+                <configuration>
+                    <jacocoReports>${project.reporting.outputDirectory}/jacoco-merged-test-coverage-report/jacoco.xml</jacocoReports>
+                </configuration>
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
@@ -724,7 +855,26 @@
                     <!-- testsToExclude come from the profile-->
                     <excludedGroups>${testsToExclude}</excludedGroups>
                     <skip>${skipUnitTests}</skip>
+                    <argLine>${surefire.jacoco.args} ${argLine}</argLine>
+                </configuration>
+            </plugin>
+            <!-- Run mvn verify to execute these Testcontainers based integration tests. Needs Docker! -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <configuration>
+                    <groups>${it.groups}</groups>
+                    <argLine>${failsafe.jacoco.args} ${argLine}</argLine>
+                    <skip>${skipIntegrationTests}</skip>
                 </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>integration-test</goal>
+                            <goal>verify</goal>
+                        </goals>
+                    </execution>
+                </executions>
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
@@ -746,7 +896,7 @@
                 <activeByDefault>true</activeByDefault>
             </activation>
             <properties>
-                <testsToExclude>edu.harvard.iq.dataverse.NonEssentialTests</testsToExclude>
+                <testsToExclude>not-essential-unittests</testsToExclude>
             </properties>
         </profile>
         <profile>
@@ -754,22 +904,136 @@
         </profile>
         <!-- TODO: Add a profile to run API tests (integration tests that end in IT.java. See conf/docker-aio/run-test-suite.sh -->
         <profile>
-            <id>tc</id>
+            <id>ct</id>
             <properties>
+                <!-- Let's go FAST here - building the image should be quick to do by default. -->
                 <skipUnitTests>true</skipUnitTests>
-                <postgresql.server.version>9.6</postgresql.server.version>
+                <skipIntegrationTests>true</skipIntegrationTests>
+                <!-- Once we truly run tests with Testcontainers, this should be switched to "docker", activating ITs -->
+                <packaging.type>docker-build</packaging.type>
+                <postgresql.server.version>13</postgresql.server.version>
+            
+                <app.image>gdcc/dataverse:${app.image.tag}</app.image>
+                <app.image.tag>unstable</app.image.tag>
+                <base.image>gdcc/base:${base.image.tag}</base.image>
+                <base.image.tag>unstable</base.image.tag>
+                <conf.image>gdcc/configbaker:${conf.image.tag}</conf.image>
+                <conf.image.tag>${app.image.tag}</conf.image.tag>
+    
+                <docker.platforms></docker.platforms>
+            
+                <!-- Variables as used in docker-compose.yml -->
+                <APP_IMAGE>${app.image}</APP_IMAGE>
+                <POSTGRES_VERSION>${postgresql.server.version}</POSTGRES_VERSION>
+                <SOLR_VERSION>${solr.version}</SOLR_VERSION>
+                <DATAVERSE_DB_USER>dataverse</DATAVERSE_DB_USER>
             </properties>
+        
             <build>
                 <plugins>
+                    <!-- Build the exploded WAR target directory -->
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-war-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <phase>prepare-package</phase>
+                                <goals>
+                                    <goal>exploded</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                        <configuration>
+                        </configuration>
+                    </plugin>
+                
+                    <!-- Build image via Docker Maven Plugin -->
+                    <plugin>
+                        <groupId>io.fabric8</groupId>
+                        <artifactId>docker-maven-plugin</artifactId>
+                        <extensions>true</extensions>
+                        <configuration>
+                            <images>
+                                <!-- Dataverse Application image -->
+                                <image>
+                                    <alias>dev_dataverse</alias>
+                                    <name>${app.image}</name>
+                                    <build>
+                                        <buildx>
+                                            <platforms>
+                                                <platform>${docker.platforms}</platform>
+                                            </platforms>
+                                        </buildx>
+                                        <dockerFile>Dockerfile</dockerFile>
+                                        <args>
+                                            <BASE_IMAGE>${base.image}</BASE_IMAGE>
+                                        </args>
+                                        <filter>@</filter>
+                                        <assembly>
+                                            <descriptor>assembly.xml</descriptor>
+                                        </assembly>
+                                    </build>
+                                    
+                                    <run>
+                                    </run>
+                                    
+                                    <external>
+                                        <type>compose</type>
+                                        <basedir>${project.basedir}</basedir>
+                                        <composeFile>docker-compose-dev.yml</composeFile>
+                                    </external>
+                                </image>
+                                
+                                <image>
+                                    <alias>dev_bootstrap</alias>
+                                    <name>${conf.image}</name>
+                                    <build>
+                                        <buildx>
+                                            <platforms>
+                                                <platform>${docker.platforms}</platform>
+                                            </platforms>
+                                        </buildx>
+                                        <dockerFile>${project.basedir}/modules/container-configbaker/Dockerfile</dockerFile>
+                                        <args>
+                                            <SOLR_VERSION>${SOLR_VERSION}</SOLR_VERSION>
+                                        </args>
+                                        <filter>@</filter>
+                                        <assembly>
+                                            <descriptor>${project.basedir}/modules/container-configbaker/assembly.xml</descriptor>
+                                        </assembly>
+                                    </build>
+                                    
+                                    <!--
+                                        We skip the run here as it is already included in the external run config
+                                        above for the main image via the docker-compose file. Even if using
+                                        -Ddocker.filter=dev_bootstrap, the run config from above is still enforced.
+                                    -->
+                                    <run>
+                                        <skip>true</skip>
+                                    </run>
+                                </image>
+
+                                <image>
+                                    <name>gdcc/dev_nginx:unstable</name>
+                                    <build>
+                                        <contextDir>${project.basedir}/modules/nginx</contextDir>
+                                    </build>
+                                </image>
+
+                            </images>
+                            <autoCreateCustomNetworks>true</autoCreateCustomNetworks>
+                        </configuration>
+                    </plugin>
                     <plugin>
                         <groupId>org.apache.maven.plugins</groupId>
                         <artifactId>maven-failsafe-plugin</artifactId>
                         <version>${maven-failsafe-plugin.version}</version>
                         <configuration>
-                            <groups>testcontainers</groups>
+                            <groups>end2end</groups>
                             <systemPropertyVariables>
                                 <postgresql.server.version>${postgresql.server.version}</postgresql.server.version>
                             </systemPropertyVariables>
+                            <skip>${skipIntegrationTests}</skip>
                         </configuration>
                         <executions>
                             <execution>
diff --git a/scripts/api/data/dataset-create-new-all-default-fields.json b/scripts/api/data/dataset-create-new-all-default-fields.json
index d7ae8cefbf7..1118ed98a03 100644
--- a/scripts/api/data/dataset-create-new-all-default-fields.json
+++ b/scripts/api/data/dataset-create-new-all-default-fields.json
@@ -22,9 +22,9 @@
           },
           {
             "typeName": "alternativeTitle",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "primitive",
-            "value": "Alternative Title"
+            "value": ["Alternative Title"]
           },
           {
             "typeName": "alternativeURL",
@@ -466,9 +466,9 @@
           },
           {
             "typeName": "productionPlace",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "primitive",
-            "value": "ProductionPlace"
+            "value": ["ProductionPlace"]
           },
           {
             "typeName": "contributor",
@@ -710,9 +710,9 @@
           },
           {
             "typeName": "series",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "compound",
-            "value": {
+            "value": [{
               "seriesName": {
                 "typeName": "seriesName",
                 "multiple": false,
@@ -725,7 +725,7 @@
                 "typeClass": "primitive",
                 "value": "SeriesInformation"
               }
-            }
+            }]
           },
           {
             "typeName": "software",
@@ -899,25 +899,25 @@
                   "typeName": "westLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "10"
+                  "value": "-72"
                 },
                 "eastLongitude": {
                   "typeName": "eastLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "20"
+                  "value": "-70"
                 },
                 "northLongitude": {
                   "typeName": "northLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "30"
+                  "value": "43"
                 },
                 "southLongitude": {
                   "typeName": "southLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "40"
+                  "value": "42"
                 }
               },
               {
@@ -925,25 +925,25 @@
                   "typeName": "westLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "50"
+                  "value": "-18"
                 },
                 "eastLongitude": {
                   "typeName": "eastLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "60"
+                  "value": "-13"
                 },
                 "northLongitude": {
                   "typeName": "northLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "70"
+                  "value": "29"
                 },
                 "southLongitude": {
                   "typeName": "southLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "80"
+                  "value": "28"
                 }
               }
             ]
@@ -1404,7 +1404,7 @@
             "multiple": true,
             "typeClass": "controlledVocabulary",
             "value": [
-              "cell counting",
+              "genome sequencing",
               "cell sorting",
               "clinical chemistry analysis",
               "DNA methylation profiling"
diff --git a/scripts/api/data/dataset-create-new.json b/scripts/api/data/dataset-create-new.json
index 0017da15974..5831e0b17e6 100644
--- a/scripts/api/data/dataset-create-new.json
+++ b/scripts/api/data/dataset-create-new.json
@@ -4,6 +4,10 @@
   "persistentUrl": "http://dx.doi.org/10.5072/FK2/9",
   "protocol": "chadham-house-rule",
   "datasetVersion": {
+    "license": {
+      "name": "CC0 1.0",
+      "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+    },
     "metadataBlocks": {
       "citation": {
         "displayName": "Citation Metadata",
@@ -121,4 +125,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/scripts/api/data/dataset-finch1_fr.json b/scripts/api/data/dataset-finch1_fr.json
index ce9616fdef5..848e5e3587e 100644
--- a/scripts/api/data/dataset-finch1_fr.json
+++ b/scripts/api/data/dataset-finch1_fr.json
@@ -1,6 +1,10 @@
 {
   "metadataLanguage": "fr",
   "datasetVersion": {
+    "license": {
+      "name": "CC0 1.0",
+      "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+    },
     "metadataBlocks": {
       "citation": {
         "fields": [
diff --git a/scripts/api/data/metadatablocks/citation.tsv b/scripts/api/data/metadatablocks/citation.tsv
index be32bb7134e..b21b6bcce57 100644
--- a/scripts/api/data/metadatablocks/citation.tsv
+++ b/scripts/api/data/metadatablocks/citation.tsv
@@ -3,7 +3,7 @@
 #datasetField	name	title	description	watermark	 fieldType	displayOrder	displayFormat	advancedSearchField	allowControlledVocabulary	allowmultiples	facetable	displayoncreate	required	parent	metadatablock_id	termURI
 	title	Title	The main title of the Dataset		text	0		TRUE	FALSE	FALSE	FALSE	TRUE	TRUE		citation	http://purl.org/dc/terms/title
 	subtitle	Subtitle	A secondary title that amplifies or states certain limitations on the main title		text	1		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE		citation	
-	alternativeTitle	Alternative Title	Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title		text	2		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE		citation	http://purl.org/dc/terms/alternative
+	alternativeTitle	Alternative Title	Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title		text	2		FALSE	FALSE	TRUE	FALSE	FALSE	FALSE		citation	http://purl.org/dc/terms/alternative
 	alternativeURL	Alternative URL	Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage	https://	url	3	<a href="#VALUE" target="_blank">#VALUE</a>	FALSE	FALSE	FALSE	FALSE	FALSE	FALSE		citation	https://schema.org/distribution
 	otherId	Other Identifier	Another unique identifier for the Dataset (e.g. producer's or another repository's identifier)		none	4	:	FALSE	FALSE	TRUE	FALSE	FALSE	FALSE		citation	
 	otherIdAgency	Agency	The name of the agency that generated the other identifier		text	5	#VALUE	FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	otherId	citation	
@@ -66,7 +66,7 @@
 	dateOfCollectionStart	Start Date	The date when the data collection started	YYYY-MM-DD	date	62	#NAME: #VALUE 	FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	dateOfCollection	citation	
 	dateOfCollectionEnd	End Date	The date when the data collection ended	YYYY-MM-DD	date	63	#NAME: #VALUE 	FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	dateOfCollection	citation	
 	kindOfData	Data Type	The type of data included in the files (e.g. survey data, clinical data, or machine-readable text)		text	64		TRUE	FALSE	TRUE	TRUE	FALSE	FALSE		citation	http://rdf-vocabulary.ddialliance.org/discovery#kindOfData
-	series	Series	Information about the dataset series to which the Dataset belong		none	65	:	FALSE	FALSE	FALSE	FALSE	FALSE	FALSE		citation	
+	series	Series	Information about the dataset series to which the Dataset belong		none	65	:	FALSE	FALSE	TRUE	FALSE	FALSE	FALSE		citation	
 	seriesName	Name	The name of the dataset series		text	66	#VALUE	TRUE	FALSE	FALSE	TRUE	FALSE	FALSE	series	citation	
 	seriesInformation	Information	Can include 1) a history of the series and 2) a summary of features that apply to the series		textbox	67	#VALUE	FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	series	citation	
 	software	Software	Information about the software used to generate the Dataset		none	68	,	FALSE	FALSE	TRUE	FALSE	FALSE	FALSE		citation	https://www.w3.org/TR/prov-o/#wasGeneratedBy
diff --git a/scripts/api/data/metadatablocks/geospatial.tsv b/scripts/api/data/metadatablocks/geospatial.tsv
index a3a8e7efd58..ce481c1bf84 100644
--- a/scripts/api/data/metadatablocks/geospatial.tsv
+++ b/scripts/api/data/metadatablocks/geospatial.tsv
@@ -8,10 +8,10 @@
 	otherGeographicCoverage	Other	Other information on the geographic coverage of the data.		text	4	#VALUE, 	FALSE	FALSE	FALSE	TRUE	FALSE	FALSE	geographicCoverage	geospatial
 	geographicUnit	Geographic Unit	Lowest level of geographic aggregation covered by the Dataset, e.g., village, county, region.		text	5		TRUE	FALSE	TRUE	TRUE	FALSE	FALSE		geospatial
 	geographicBoundingBox	Geographic Bounding Box	The fundamental geometric description for any Dataset that models geography is the geographic bounding box. It describes the minimum box, defined by west and east longitudes and north and south latitudes, which includes the largest geographic extent of the  Dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. Inclusion of this element in the codebook is recommended, but is required if the bound polygon box is included. 		none	6		FALSE	FALSE	TRUE	FALSE	FALSE	FALSE		geospatial
-	westLongitude	West Longitude	Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -180,0 <= West  Bounding Longitude Value <= 180,0.		text	7		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
-	eastLongitude	East Longitude	Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0.		text	8		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
-	northLongitude	North Latitude	Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0.		text	9		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
-	southLongitude	South Latitude	Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0.		text	10		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
+	westLongitude	Westernmost (Left) Longitude	Westernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -180,0 <= West  Bounding Longitude Value <= 180,0.		text	7		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
+	eastLongitude	Easternmost (Right) Longitude	Easternmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -180,0 <= East Bounding Longitude Value <= 180,0.		text	8		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
+	northLongitude	Northernmost (Top) Latitude	Northernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -90,0 <= North Bounding Latitude Value <= 90,0.		text	9		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
+	southLongitude	Southernmost (Bottom) Latitude	Southernmost coordinate delimiting the geographic extent of the Dataset. A valid range of values,  expressed in decimal degrees, is -90,0 <= South Bounding Latitude Value <= 90,0.		text	10		FALSE	FALSE	FALSE	FALSE	FALSE	FALSE	geographicBoundingBox	geospatial
 #controlledVocabulary	DatasetField	Value	identifier	displayOrder											
 	country	Afghanistan		0											
 	country	Albania		1											
diff --git a/scripts/api/setup-all.sh b/scripts/api/setup-all.sh
index c4bd6c2c9c5..e247caa72b5 100755
--- a/scripts/api/setup-all.sh
+++ b/scripts/api/setup-all.sh
@@ -3,7 +3,14 @@
 SECURESETUP=1
 DV_SU_PASSWORD="admin"
 
-for opt in $*
+DATAVERSE_URL=${DATAVERSE_URL:-"http://localhost:8080"}
+# Make sure scripts we call from this one also get this env var!
+export DATAVERSE_URL
+
+# scripts/api when called from the root of the source tree
+SCRIPT_PATH="$(dirname "$0")"
+
+for opt in "$@"
 do
   case $opt in
       "--insecure")
@@ -24,13 +31,9 @@ do
   esac
 done
 
+# shellcheck disable=SC2016
 command -v jq >/dev/null 2>&1 || { echo >&2 '`jq` ("sed for JSON") is required, but not installed. Download the binary for your platform from http://stedolan.github.io/jq/ and make sure it is in your $PATH (/usr/bin/jq is fine) and executable with `sudo chmod +x /usr/bin/jq`. On Mac, you can install it with `brew install jq` if you use homebrew: http://brew.sh . Aborting.'; exit 1; }
 
-echo "deleting all data from Solr"
-curl http://localhost:8983/solr/collection1/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}"
-
-SERVER=http://localhost:8080/api
-
 # Everything + the kitchen sink, in a single script
 # - Setup the metadata blocks and controlled vocabulary
 # - Setup the builtin roles
@@ -41,49 +44,49 @@ SERVER=http://localhost:8080/api
 
 
 echo "Setup the metadata blocks"
-./setup-datasetfields.sh
+"$SCRIPT_PATH"/setup-datasetfields.sh
 
 echo "Setup the builtin roles"
-./setup-builtin-roles.sh
+"$SCRIPT_PATH"/setup-builtin-roles.sh
 
 echo "Setup the authentication providers"
-./setup-identity-providers.sh
+"$SCRIPT_PATH"/setup-identity-providers.sh
 
 echo "Setting up the settings"
 echo  "- Allow internal signup"
-curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp"
-curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl"
-
-curl -X PUT -d doi "$SERVER/admin/settings/:Protocol"
-curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority"
-curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder"
-curl -X PUT -d DataCite "$SERVER/admin/settings/:DoiProvider"
-curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY
-curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy
-curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods
+curl -X PUT -d yes "${DATAVERSE_URL}/api/admin/settings/:AllowSignUp"
+curl -X PUT -d "/dataverseuser.xhtml?editMode=CREATE" "${DATAVERSE_URL}/api/admin/settings/:SignUpUrl"
+
+curl -X PUT -d doi "${DATAVERSE_URL}/api/admin/settings/:Protocol"
+curl -X PUT -d 10.5072 "${DATAVERSE_URL}/api/admin/settings/:Authority"
+curl -X PUT -d "FK2/" "${DATAVERSE_URL}/api/admin/settings/:Shoulder"
+curl -X PUT -d DataCite "${DATAVERSE_URL}/api/admin/settings/:DoiProvider"
+curl -X PUT -d burrito "${DATAVERSE_URL}/api/admin/settings/BuiltinUsers.KEY"
+curl -X PUT -d localhost-only "${DATAVERSE_URL}/api/admin/settings/:BlockedApiPolicy"
+curl -X PUT -d 'native/http' "${DATAVERSE_URL}/api/admin/settings/:UploadMethods"
 echo
 
 echo "Setting up the admin user (and as superuser)"
-adminResp=$(curl -s -H "Content-type:application/json" -X POST -d @data/user-admin.json "$SERVER/builtin-users?password=$DV_SU_PASSWORD&key=burrito")
-echo $adminResp
-curl -X POST "$SERVER/admin/superuser/dataverseAdmin"
+adminResp=$(curl -s -H "Content-type:application/json" -X POST -d @"$SCRIPT_PATH"/data/user-admin.json "${DATAVERSE_URL}/api/builtin-users?password=$DV_SU_PASSWORD&key=burrito")
+echo "$adminResp"
+curl -X POST "${DATAVERSE_URL}/api/admin/superuser/dataverseAdmin"
 echo
 
 echo "Setting up the root dataverse"
-adminKey=$(echo $adminResp | jq .data.apiToken | tr -d \")
-curl -s -H "Content-type:application/json" -X POST -d @data/dv-root.json "$SERVER/dataverses/?key=$adminKey"
+adminKey=$(echo "$adminResp" | jq .data.apiToken | tr -d \")
+curl -s -H "Content-type:application/json" -X POST -d @"$SCRIPT_PATH"/data/dv-root.json "${DATAVERSE_URL}/api/dataverses/?key=$adminKey"
 echo
 echo "Set the metadata block for Root"
-curl -s -X POST -H "Content-type:application/json" -d "[\"citation\"]" $SERVER/dataverses/:root/metadatablocks/?key=$adminKey
+curl -s -X POST -H "Content-type:application/json" -d "[\"citation\"]" "${DATAVERSE_URL}/api/dataverses/:root/metadatablocks/?key=$adminKey"
 echo
 echo "Set the default facets for Root"
-curl -s -X POST -H "Content-type:application/json" -d "[\"authorName\",\"subject\",\"keywordValue\",\"dateOfDeposit\"]" $SERVER/dataverses/:root/facets/?key=$adminKey
+curl -s -X POST -H "Content-type:application/json" -d "[\"authorName\",\"subject\",\"keywordValue\",\"dateOfDeposit\"]" "${DATAVERSE_URL}/api/dataverses/:root/facets/?key=$adminKey"
 echo
 
 echo "Set up licenses"
 # Note: CC0 has been added and set as the default license through
 # Flyway script V5.9.0.1__7440-configurable-license-list.sql
-curl -X POST -H 'Content-Type: application/json' -H "X-Dataverse-key:$adminKey" $SERVER/licenses --upload-file data/licenses/licenseCC-BY-4.0.json
+curl -X POST -H 'Content-Type: application/json' -H "X-Dataverse-key:$adminKey" "${DATAVERSE_URL}/api/licenses" --upload-file "$SCRIPT_PATH"/data/licenses/licenseCC-BY-4.0.json
 
 # OPTIONAL USERS AND DATAVERSES
 #./setup-optional.sh
@@ -92,8 +95,8 @@ if [ $SECURESETUP = 1 ]
 then
     # Revoke the "burrito" super-key; 
     # Block sensitive API endpoints;
-    curl -X DELETE $SERVER/admin/settings/BuiltinUsers.KEY
-    curl -X PUT -d 'admin,builtin-users' $SERVER/admin/settings/:BlockedApiEndpoints
+    curl -X DELETE "${DATAVERSE_URL}/api/admin/settings/BuiltinUsers.KEY"
+    curl -X PUT -d 'admin,builtin-users' "${DATAVERSE_URL}/api/admin/settings/:BlockedApiEndpoints"
     echo "Access to the /api/admin and /api/test is now disabled, except for connections from localhost."
 else 
     echo "IMPORTANT!!!"
diff --git a/scripts/api/setup-builtin-roles.sh b/scripts/api/setup-builtin-roles.sh
index 0f3c1c150cd..f1f268debbc 100755
--- a/scripts/api/setup-builtin-roles.sh
+++ b/scripts/api/setup-builtin-roles.sh
@@ -1,34 +1,37 @@
-SERVER=http://localhost:8080/api
+#!/bin/bash
+
+DATAVERSE_URL=${DATAVERSE_URL:-"http://localhost:8080"}
+SCRIPT_PATH="$(dirname "$0")"
 
 # Setup the builtin roles
 echo "Setting up admin role"
-curl -H "Content-type:application/json" -d @data/role-admin.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-admin.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up file downloader role"
-curl -H "Content-type:application/json" -d @data/role-filedownloader.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-filedownloader.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up full contributor role"
-curl -H "Content-type:application/json" -d @data/role-fullContributor.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-fullContributor.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up dv contributor role"
-curl -H "Content-type:application/json" -d @data/role-dvContributor.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-dvContributor.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up ds contributor role"
-curl -H "Content-type:application/json" -d @data/role-dsContributor.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-dsContributor.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up editor role"
-curl -H "Content-type:application/json" -d @data/role-editor.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-editor.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up curator role"
-curl -H "Content-type:application/json" -d @data/role-curator.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-curator.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
 
 echo "Setting up member role"
-curl -H "Content-type:application/json" -d @data/role-member.json http://localhost:8080/api/admin/roles/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/role-member.json "${DATAVERSE_URL}/api/admin/roles/"
 echo
diff --git a/scripts/api/setup-datasetfields.sh b/scripts/api/setup-datasetfields.sh
index 0d2d60b9538..51da677ceb8 100755
--- a/scripts/api/setup-datasetfields.sh
+++ b/scripts/api/setup-datasetfields.sh
@@ -1,9 +1,13 @@
-#!/bin/sh
-curl http://localhost:8080/api/admin/datasetfield/loadNAControlledVocabularyValue
+#!/bin/bash
+
+DATAVERSE_URL=${DATAVERSE_URL:-"http://localhost:8080"}
+SCRIPT_PATH="$(dirname "$0")"
+
+curl "${DATAVERSE_URL}/api/admin/datasetfield/loadNAControlledVocabularyValue"
 # TODO: The "@" is confusing. Consider switching to --upload-file citation.tsv
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values"
-curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/citation.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/geospatial.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/social_science.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/astrophysics.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/biomedical.tsv -H "Content-type: text/tab-separated-values"
+curl "${DATAVERSE_URL}/api/admin/datasetfield/load" -X POST --data-binary @"$SCRIPT_PATH"/data/metadatablocks/journals.tsv -H "Content-type: text/tab-separated-values"
diff --git a/scripts/api/setup-identity-providers.sh b/scripts/api/setup-identity-providers.sh
index 89ac59de32f..e877f71c6b0 100755
--- a/scripts/api/setup-identity-providers.sh
+++ b/scripts/api/setup-identity-providers.sh
@@ -1,8 +1,11 @@
-SERVER=http://localhost:8080/api
+#!/bin/bash
+
+DATAVERSE_URL=${DATAVERSE_URL:-"http://localhost:8080"}
+SCRIPT_PATH="$(dirname "$0")"
 
 # Setup the authentication providers
 echo "Setting up internal user provider"
-curl -H "Content-type:application/json" -d @data/authentication-providers/builtin.json http://localhost:8080/api/admin/authenticationProviders/
+curl -H "Content-type:application/json" -d @"$SCRIPT_PATH"/data/authentication-providers/builtin.json "${DATAVERSE_URL}/api/admin/authenticationProviders/"
 
 #echo "Setting up Echo providers"
 #curl -H "Content-type:application/json" -d @data/authentication-providers/echo.json http://localhost:8080/api/admin/authenticationProviders/
diff --git a/scripts/dev/dev-rebuild.sh b/scripts/dev/dev-rebuild.sh
index 71857b14068..9eae195b135 100755
--- a/scripts/dev/dev-rebuild.sh
+++ b/scripts/dev/dev-rebuild.sh
@@ -1,8 +1,9 @@
 #!/bin/sh
-PAYARA_DIR=/usr/local/payara5
+PAYARA_DIR=/usr/local/payara6
 ASADMIN=$PAYARA_DIR/glassfish/bin/asadmin
 DB_NAME=dvndb
 DB_USER=dvnapp
+export PGPASSWORD=secret
 
 echo "Checking if there is a war file to undeploy..."
 LIST_APP=$($ASADMIN list-applications -t)
@@ -23,7 +24,7 @@ echo "Deleting ALL DATA FILES uploaded to Dataverse..."
 rm -rf $PAYARA_DIR/glassfish/domains/domain1/files
 
 echo "Terminating database sessions so we can drop the database..."
-psql -U postgres -c "
+psql -h localhost -U postgres -c "
 SELECT pg_terminate_backend(pg_stat_activity.pid)
 FROM pg_stat_activity
 WHERE pg_stat_activity.datname = '$DB_NAME'
@@ -31,14 +32,14 @@ WHERE pg_stat_activity.datname = '$DB_NAME'
 " template1
 
 echo "Dropping the database..."
-psql -U $DB_USER -c "DROP DATABASE \"$DB_NAME\"" template1
+psql -h localhost -U $DB_USER -c "DROP DATABASE \"$DB_NAME\"" template1
 echo $?
 
 echo "Clearing out data from Solr..."
-curl http://localhost:8983/solr/collection1/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}"
+curl "http://localhost:8983/solr/collection1/update/json?commit=true" -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}"
 
 echo "Creating a new database..."
-psql -U $DB_USER -c "CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"" template1
+psql -h localhost -U $DB_USER -c "CREATE DATABASE \"$DB_NAME\" WITH OWNER = \"$DB_USER\"" template1
 echo $?
 
 echo "Starting app server..."
@@ -53,7 +54,7 @@ cd scripts/api
 cd ../..
 
 echo "Creating SQL sequence..."
-psql -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql
+psql -h localhost -U $DB_USER $DB_NAME -f doc/sphinx-guides/source/_static/util/createsequence.sql
 
 echo "Setting DOI provider to \"FAKE\"..." 
 curl http://localhost:8080/api/admin/settings/:DoiProvider -X PUT -d FAKE
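dev-rebuild.sh now talks to PostgreSQL over TCP (-h localhost) with the password supplied via PGPASSWORD, rather than relying on a local socket. If the rebuild fails early, the connection can be checked by hand with the same defaults the script uses (adjust user and password to your setup):

    PGPASSWORD=secret psql -h localhost -U dvnapp -c 'SELECT version();' template1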
diff --git a/scripts/dev/docker-final-setup.sh b/scripts/dev/docker-final-setup.sh
new file mode 100755
index 00000000000..d2453619ec2
--- /dev/null
+++ b/scripts/dev/docker-final-setup.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+set -euo pipefail
+
+echo "Running setup-all.sh (INSECURE MODE)..."
+cd scripts/api || exit
+./setup-all.sh --insecure -p=admin1 | tee /tmp/setup-all.sh.out
+cd ../..
+
+echo "Setting system mail address..."
+curl -X PUT -d "dataverse@localhost" "http://localhost:8080/api/admin/settings/:SystemEmail"
+
+echo "Setting DOI provider to \"FAKE\"..."
+curl "http://localhost:8080/api/admin/settings/:DoiProvider" -X PUT -d FAKE
+
+API_TOKEN=$(grep apiToken "/tmp/setup-all.sh.out" | jq ".data.apiToken" | tr -d \")
+export API_TOKEN
+
+echo "Publishing root dataverse..."
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST "http://localhost:8080/api/dataverses/:root/actions/:publish"
+
+echo "Allowing users to create dataverses and datasets in root..."
+curl -H "X-Dataverse-key:$API_TOKEN" -X POST -H "Content-type:application/json" -d "{\"assignee\": \":authenticated-users\",\"role\": \"fullContributor\"}" "http://localhost:8080/api/dataverses/:root/assignments"
+
+echo "Checking Dataverse version..."
+curl "http://localhost:8080/api/info/version"
\ No newline at end of file
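docker-final-setup.sh collects the post-deployment steps for a containerized dev instance: it runs setup-all.sh in insecure mode, sets the system e-mail and the FAKE DOI provider, publishes the root collection, and grants :authenticated-users the fullContributor role. Expected invocation, assuming the application container already answers on localhost:8080:

    # from the root of the source tree
    ./scripts/dev/docker-final-setup.sh
    # sanity check afterwards (the same call the script ends with)
    curl "http://localhost:8080/api/info/version"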
diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile
index d40d4d792ea..399bc65168a 100644
--- a/scripts/installer/Makefile
+++ b/scripts/installer/Makefile
@@ -7,7 +7,6 @@ JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd
 SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml ${INSTALLER_ZIP_DIR}/update-fields.sh
 SOLR_CONFIG=${INSTALLER_ZIP_DIR}/solrconfig.xml
 PYTHON_FILES=${INSTALLER_ZIP_DIR}/README_python.txt ${INSTALLER_ZIP_DIR}/installConfig.py ${INSTALLER_ZIP_DIR}/installUtils.py ${INSTALLER_ZIP_DIR}/install.py ${INSTALLER_ZIP_DIR}/installAppServer.py ${INSTALLER_ZIP_DIR}/requirements.txt ${INSTALLER_ZIP_DIR}/default.config ${INSTALLER_ZIP_DIR}/interactive.config
-INSTALL_SCRIPT=${INSTALLER_ZIP_DIR}/install
 
 installer:	dvinstall.zip
 
@@ -56,13 +55,13 @@ ${JHOVE_SCHEMA}:	../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR}
 	@echo copying jhove schema file
 	/bin/cp ../../conf/jhove/jhoveConfig.xsd ${INSTALLER_ZIP_DIR}
 
-${SOLR_SCHEMA}:		../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR}
+${SOLR_SCHEMA}:		../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR}
 	@echo copying Solr schema file
-	/bin/cp ../../conf/solr/8.11.1/schema.xml ../../conf/solr/8.11.1/update-fields.sh ${INSTALLER_ZIP_DIR}
+	/bin/cp ../../conf/solr/9.3.0/schema.xml ../../conf/solr/9.3.0/update-fields.sh ${INSTALLER_ZIP_DIR}
 
-${SOLR_CONFIG}:		../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR}
+${SOLR_CONFIG}:		../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR}
 	@echo copying Solr config file
-	/bin/cp ../../conf/solr/8.11.1/solrconfig.xml ${INSTALLER_ZIP_DIR}
+	/bin/cp ../../conf/solr/9.3.0/solrconfig.xml ${INSTALLER_ZIP_DIR}
 
 ${PYTHON_FILES}: README_python.txt install.py installConfig.py installAppServer.py installUtils.py requirements.txt default.config interactive.config ${INSTALLER_ZIP_DIR}
 	@echo copying Python installer files
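With the Makefile pointing at conf/solr/9.3.0, rebuilding the installer bundle picks up the Solr 9.3.0 schema and solrconfig automatically; the dropped INSTALL_SCRIPT variable matches the removal of the Perl installer below. To regenerate the bundle:

    cd scripts/installer
    make installer    # packages dvinstall.zip with the Solr 9.3.0 schema/config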
diff --git a/scripts/installer/README.txt b/scripts/installer/README.txt
index 350a17fc00c..c3ed8211082 100644
--- a/scripts/installer/README.txt
+++ b/scripts/installer/README.txt
@@ -1,42 +1 @@
-The installer script (install) can be run either by a developer (inside the source tree), or by an end-user installing the Dataverse. The latter will obtain the script as part of the distribution bundle; and they will be running it inside the unzipped bundle directory. 
-
-In the former (developer) case, the installer will be looking for the files it needs in the other directories in the source tree. 
-For example, the war file (once built) can be found in ../../target/. The name of the war file will be dataverse-{VERSION}.war, where
-{VERSION} is the version number of the Dataverse, obtained from the pom file (../../pom.xml). For example, as of writing this README.txt (July 2015) the war file is ../../target/dataverse-4.1.war/
-
-When building a distribution archive, the Makefile will pile all the files that the installer needs in one directory (./dvinstall here) and then zip it up. We upload the resulting zip bundle on github as the actual software release. This way the end user only gets the files they actually need to install the Dataverse app. So they can do so without pulling the entire source tree. 
-
-
-The installer script itself (the perl script ./install) knows to look for all these files in 2 places (for example, it will look for the war file in ../../target/; if it's not there, it'll assume this is a distribution bundle and look for it as ./dataverse.war)
-
-Here's the list of the files that the installer needs: 
-
-the war file:
-target/dataverse-{VERSION}.war
-
-and also:
-
-from scripts/installer (this directory):
-
-install
-glassfish-setup.sh
-
-from scripts/api:
-
-setup-all.sh
-setup-builtin-roles.sh
-setup-datasetfields.sh
-setup-dvs.sh
-setup-identity-providers.sh
-setup-users.sh
-data (the entire directory with all its contents)
-
-from conf/jhove:
-
-jhove.conf
-
-SOLR schema and config files, from conf/solr/8.11.1:
-
-schema.xml
-schema_dv_mdb_fields.xml
-solrconfig.xml
+See README_python.txt
diff --git a/scripts/installer/as-setup.sh b/scripts/installer/as-setup.sh
index 853db77f471..fc5b378cff5 100755
--- a/scripts/installer/as-setup.sh
+++ b/scripts/installer/as-setup.sh
@@ -56,15 +56,15 @@ function preliminary_setup()
 
   # avoid OutOfMemoryError: PermGen per http://eugenedvorkin.com/java-lang-outofmemoryerror-permgen-space-error-during-deployment-to-glassfish/
   #./asadmin $ASADMIN_OPTS list-jvm-options
-  # Note that these JVM options are different for Payara5 and Glassfish4:
+  # Note that these JVM options are different for Payara and Glassfish4:
   # old Glassfish4 options: (commented out)
   #./asadmin $ASADMIN_OPTS delete-jvm-options "-XX\:MaxPermSize=192m"
   #./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxPermSize=512m"
   #./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:PermSize=256m"
-  # payara5 ships with the "-server" option already in domain.xml, so no need:
+  # Payara ships with the "-server" option already in domain.xml, so no need:
   #./asadmin $ASADMIN_OPTS delete-jvm-options -client
 
-  # new Payara5 options: (thanks to donsizemore@unc.edu)
+  # new Payara options: (thanks to donsizemore@unc.edu)
   ./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MaxMetaspaceSize=512m"
   ./asadmin $ASADMIN_OPTS create-jvm-options "-XX\:MetaspaceSize=256m"
   ./asadmin $ASADMIN_OPTS create-jvm-options "-Dfish.payara.classloading.delegate=false"
@@ -106,16 +106,19 @@ function preliminary_setup()
   # (we can no longer offer EZID with their shared test account)
   # jvm-options use colons as separators, escape as literal
   DOI_BASEURL_ESC=`echo $DOI_BASEURL | sed -e 's/:/\\\:/'`
-  ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.username=${DOI_USERNAME}"
-  ./asadmin $ASADMIN_OPTS create-jvm-options '\-Ddoi.password=${ALIAS=doi_password_alias}'
-  ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.baseurlstring=$DOI_BASEURL_ESC"
+  ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.datacite.username=${DOI_USERNAME}"
+  ./asadmin $ASADMIN_OPTS create-jvm-options '\-Ddataverse.pid.datacite.password=${ALIAS=doi_password_alias}'
+  ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.datacite.mds-api-url=$DOI_BASEURL_ESC"
 
   # jvm-options use colons as separators, escape as literal
   DOI_DATACITERESTAPIURL_ESC=`echo $DOI_DATACITERESTAPIURL | sed -e 's/:/\\\:/'`
-  ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddoi.dataciterestapiurlstring=$DOI_DATACITERESTAPIURL_ESC"
+  ./asadmin $ASADMIN_OPTS create-jvm-options "\-Ddataverse.pid.datacite.rest-api-url=$DOI_DATACITERESTAPIURL_ESC"
 
   ./asadmin $ASADMIN_OPTS create-jvm-options "-Ddataverse.timerServer=true"
 
+  # Workaround for FISH-7722: Failed to deploy war with @Stateless https://github.com/payara/Payara/issues/6337
+  ./asadmin $ASADMIN_OPTS create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED
+
   # enable comet support
   ./asadmin $ASADMIN_OPTS set server-config.network-config.protocols.protocol.http-listener-1.http.comet-support-enabled="true"
 
@@ -155,18 +158,18 @@ function final_setup(){
 
 if [ "$DOCKER_BUILD" = "true" ]
   then
-    FILES_DIR="/usr/local/payara5/glassfish/domains/domain1/files"
+    FILES_DIR="/usr/local/payara6/glassfish/domains/domain1/files"
     RSERVE_HOST="localhost"
     RSERVE_PORT="6311"
     RSERVE_USER="rserve"
     RSERVE_PASS="rserve"
     HOST_ADDRESS="localhost\:8080"
-    pushd /usr/local/payara5/glassfish/bin/
+    pushd /usr/local/payara6/glassfish/bin/
     ./asadmin start-domain domain1
     preliminary_setup
-    chmod -R 777 /usr/local/payara5/
-    rm -rf /usr/local/payara5/glassfish/domains/domain1/generated 
-    rm -rf /usr/local/payara5/glassfish/domains/domain1/applications
+    chmod -R 777 /usr/local/payara6/
+    rm -rf /usr/local/payara6/glassfish/domains/domain1/generated
+    rm -rf /usr/local/payara6/glassfish/domains/domain1/applications
     popd
     exit 0
 fi
@@ -276,7 +279,7 @@ if [ ! -d "$DOMAIN_DIR" ]
     exit 2
 fi
 
-echo "Setting up your app. server (Payara5) to support Dataverse"
+echo "Setting up your app. server (Payara) to support Dataverse"
 echo "Payara directory: "$GLASSFISH_ROOT
 echo "Domain directory:    "$DOMAIN_DIR
 
diff --git a/scripts/installer/default.config b/scripts/installer/default.config
index 312dd2cb2d8..8647cd02416 100644
--- a/scripts/installer/default.config
+++ b/scripts/installer/default.config
@@ -1,7 +1,7 @@
 [glassfish]
 HOST_DNS_ADDRESS = localhost
 GLASSFISH_USER = dataverse
-GLASSFISH_DIRECTORY = /usr/local/payara5
+GLASSFISH_DIRECTORY = /usr/local/payara6
 GLASSFISH_ADMIN_USER = admin
 GLASSFISH_ADMIN_PASSWORD = secret
 GLASSFISH_HEAP = 2048
diff --git a/scripts/installer/install b/scripts/installer/install
deleted file mode 100755
index 2208f014606..00000000000
--- a/scripts/installer/install
+++ /dev/null
@@ -1,1538 +0,0 @@
-#!/usr/bin/perl
-
-use strict;
-use warnings;
-use Getopt::Long;
-use Socket;
-use File::Copy;
-
-# command line options:
-
-my $verbose;
-my $postgresonly;
-my $hostname;
-my $gfuser;
-my $gfdir;
-my $mailserver;
-my $noninteractive;
-my $skipdatabasesetup;
-my $force;
-my $nogfpasswd;
-my $admin_email;
-
-my ($rez) = GetOptions(
-    #"length=i" => \$length,    # numeric
-    #"file=s"   => \$data,      # string
-    "verbose"      => \$verbose,
-    "pg_only"      => \$postgresonly,
-    "skip_db_setup" => \$skipdatabasesetup,
-    "hostname=s"   => \$hostname,
-    "gfuser=s"     => \$gfuser,
-    "gfdir=s"      => \$gfdir,
-    "mailserver=s" => \$mailserver,
-    "y|yes"        => \$noninteractive,
-    "f|force"      => \$force,
-    "nogfpasswd"   => \$nogfpasswd,
-    "admin_email=s" => \$admin_email,
-);
-
-# openshift/docker-specific - name of the "pod" executing the installer:
-my $pod_name = "";
-if (exists($ENV{'MY_POD_NAME'}))
-{
-    $pod_name = $ENV{'MY_POD_NAME'};
-}
-
-my $jq_exec_path = "";
-my $psql_exec_path = "";
-my $cwd;
-my $WARFILE_LOCATION = "dataverse.war";
-
-
-my @CONFIG_VARIABLES;
-
-if ($postgresonly) 
-{
-    @CONFIG_VARIABLES =
-      ( 'POSTGRES_SERVER', 'POSTGRES_PORT', 'POSTGRES_DATABASE', 'POSTGRES_USER', 'POSTGRES_PASSWORD', 'POSTGRES_ADMIN_PASSWORD' );
-
-} 
-else 
-{
-
-    @CONFIG_VARIABLES = (
-	'HOST_DNS_ADDRESS',
-	'GLASSFISH_USER',
-	'GLASSFISH_DIRECTORY',
-	'ADMIN_EMAIL',
-	'MAIL_SERVER',
-
-	'POSTGRES_SERVER',
-	'POSTGRES_PORT',
-	'POSTGRES_ADMIN_PASSWORD',
-	'POSTGRES_DATABASE',
-	'POSTGRES_USER',
-	'POSTGRES_PASSWORD',
-
-	'SOLR_LOCATION', 
-	
-	'RSERVE_HOST',
-	'RSERVE_PORT',
-	'RSERVE_USER',
-	'RSERVE_PASSWORD',
-
-	'DOI_USERNAME',
-	'DOI_PASSWORD',
-	'DOI_BASEURL',
-	'DOI_DATACITERESTAPIURL'
-
-	);
-}
-
-my %CONFIG_DEFAULTS; 
-
-&read_config_defaults("default.config");
-
-my %CONFIG_PROMPTS;
-my %CONFIG_COMMENTS; 
-
-&read_interactive_config_values("interactive.config");
-
-my $API_URL = "http://localhost:8080/api";
-
-# jodbc.postgresql.org recommends 4.2 for Java 8.
-# updated drivers may be obtained from
-#  https://jdbc.postgresql.org/download.html
-my $postgres_jdbc = "postgresql-42.2.12.jar";
-
-# 0. A few preliminary checks:
-
-# 0a. OS:
-
-my $uname_out = `uname -a`;
-
-my @uname_tokens = split( " ", $uname_out );
-
-my $WORKING_OS;
-if ( $uname_tokens[0] eq "Darwin" ) {
-    print "\nThis appears to be a MacOS X system; good.\n";
-    # TODO: check the OS version
-
-    $WORKING_OS = "MacOSX";
-}
-elsif ( $uname_tokens[0] eq "Linux" ) {
-    if ( -f "/etc/redhat-release" ) {
-        print "\nThis appears to be a RedHat system; good.\n";
-        $WORKING_OS = "RedHat";
-        # TODO: check the distro version
-    }
-    else {
-        print "\nThis appears to be a non-RedHat Linux system;\n";
-        print "this installation *may* succeed; but we're not making any promises!\n";
-        $WORKING_OS = "Linux";
-    }
-} else {
-    print "\nWARNING: This appears to be neither a Linux or MacOS X system!\n";
-    print "This installer script will most likely fail. Please refer to the\n";
-    print "DVN Installers Guide for more information.\n\n";
-
-    $WORKING_OS = "Unknown";
-
-    unless ($noninteractive) {
-        exit 0;
-    }
-
-    print "(Normally we would stop right there; but since the \"--yes\" option was specified, we'll attempt to continue)\n\n";
-
-}
-
-
-# 0b. host name:
-
-if ($hostname) {
-    $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname;
-} else {
-    my $hostname_from_cmdline = `hostname`;
-    chop $hostname_from_cmdline;
-
-    $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} = $hostname_from_cmdline;
-}
-
-# 0c. check if there is the default.config file with the pre-set configuration values: 
-
-#  read default configuration values from tab separated file "default.config" if it exists
-#  moved after the $hostname_from_cmdline section to avoid excessively complicating the logic
-#  of command line argument, automatic selection, or config file.
-#
-#  NOTE: if the file contain any Postgres configuration (for example: "POSTGRES_USER   dvnApp")
-#  but an environmental variable with the same name exists - THE ENV. VAR WILL WIN! (don't ask)
-#  (actually this is to accommodate the openshift docker deployment scenario)
-
-sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s };
-
-#my $config_default_file = "default.config";
-#
-#if ( -e $config_default_file )
-#{
-#	print("loading default configuration values from $config_default_file\n");
-#	open( my $inp_cfg, $config_default_file );
-#	while( my $ln = <$inp_cfg> )
-#	{
-#		my @xs = split('\t', $ln );
-#		if ( 2 == @xs )
-#		{
-#			my $k = $xs[0];
-#                        my $v = trim($xs[1]);
-##                        if (defined $ENV{$k} && ($k eq "POSTGRES_USER" || $k eq "POSTGRES_PASSWORD")) {
-##                            $v = $ENV{$k};
-##                        }
-##                        if (defined $ENV{'POSTGRESQL_ADMIN_PASSWORD'} && $k eq "POSTGRES_ADMIN_PASSWORD")   {
-##                            $v = $ENV{'POSTGRESQL_ADMIN_PASSWORD'};
-##                        }
-#                        $CONFIG_DEFAULTS{$k}=$v;
-#		}
-#	}
-#}
-#else
-#{
-#	print("using hard-coded default configuration values (no $config_default_file available)\n");
-#}
-
-# 0d. current OS user. (the first one we find wins)
-
-my $current_user = $ENV{LOGNAME} || $ENV{USER} || getpwuid($<);
-
-# if the username was specified on the command-line, it takes precendence:
-if ($gfuser) {
-    print "Using CLI-specified user $gfuser.\n";
-    $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $gfuser;
-}
-
-
-if (!$CONFIG_DEFAULTS{'GLASSFISH_USER'} || !$noninteractive) {
-   $CONFIG_DEFAULTS{'GLASSFISH_USER'} = $current_user;
-   print "using $current_user.\n";
-}
-
-
-# prefer that we not install as root.
-unless ( $< != 0 ) {
-    print "####################################################################\n";
-    print "     It is recommended that this script not be run as root.\n";
-    print " Consider creating the service account \"dataverse\", giving it ownership\n";
-    print "  on the glassfish/domains/domain1/ and glassfish/lib/ directories,\n";
-    print "    along with the JVM-specified files.dir location, and designate\n";
-    print "    that account to launch and run the Application Server (Payara),\n";
-    print "           AND use that user account to run this installer.\n"; 
-    print "####################################################################\n";
-
-    unless ($noninteractive)
-    {
-	print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n";
-        system "stty cbreak </dev/tty >/dev/tty 2>&1";
-        unless ($noninteractive) {
-            my $key = getc(STDIN);
-        }
-        system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-	print "\n";
-    }
-}
-
-# ensure $gfuser exists or bail
-my $gfidcmd="id $CONFIG_DEFAULTS{'GLASSFISH_USER'} > /dev/null";
-my $gfreturncode=system($gfidcmd);
-if ($gfreturncode != 0) {
-   die "Couldn't find user $gfuser. Please ensure the account exists and is readable by the user running this installer.\n";
-}
-
-# 0e. the following 2 options can also be specified on the command line, and 
-# also take precedence over the default values that are hard-coded and/or 
-# provided in the default.config file:
-
-if ($mailserver) {
-    $CONFIG_DEFAULTS{'MAIL_SERVER'} = $mailserver;
-}
-
-if ($gfdir) {
-    $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $gfdir;
-}
-
-# 1. CHECK FOR SOME MANDATORY COMPONENTS (WAR FILE, ETC.)
-# since we can't do anything without these things in place, better check for 
-# them before we go into the interactive config mode. 
-# (skip if this is a database-only setup)
-
-unless ($postgresonly) 
-{
-# 1a. war file: 
-    print "\nChecking if the application .war file is available... ";
-
-# if this installer section is running out of the installer zip bundle directory,
-# the war file will be sitting right here, named "dataverse.war": 
-
-    $WARFILE_LOCATION = "dataverse.war"; 
-
-# but if it's not here, this is probably a personal development 
-# setup, so their build should be up in their source tree:
-
-    unless ( -f $WARFILE_LOCATION ) {
-	my $DATAVERSE_VERSION = "";
-	my $DATAVERSE_POM_FILE = "../../modules/dataverse-parent/pom.xml";
-	if ( -f $DATAVERSE_POM_FILE ) 
-	{
-	    open DPF, $DATAVERSE_POM_FILE; 
-	    my $pom_line;
-	    while ($pom_line=<DPF>)
-	    {
-		chop $pom_line;
-		if ($pom_line =~/^[ \t]*<revision>([0-9\.]+)<\/revision>/)
-		{
-		    $DATAVERSE_VERSION=$1;
-		    last;
-		}	    
-	    }
-	    close DPF;
-	    
-	    if ($DATAVERSE_VERSION ne "") {
-		$WARFILE_LOCATION = "../../target/dataverse-" . $DATAVERSE_VERSION . ".war";
-	    }
-	}
-    }
-
-# But, if the war file cannot be found in either of the 2
-# places - we'll just have to give up:
-
-    unless ( -f $WARFILE_LOCATION ) {
-	print "\nWARNING: Can't find the project .war file!\n";
-	print "\tAre you running the installer in the right directory?\n";
-	print "\tHave you built the war file?\n";
-	print "\t(if not, build the project and run the installer again)\n";
-	
-	exit 0;
-    }
-    print " Yes, it is!\n";
-
-
-# 1b. check and remember the working dir:
-    chomp( $cwd = `pwd` );
-
-# 1d. jq executable: 
-
-    my $sys_path = $ENV{'PATH'};
-    my @sys_path_dirs = split( ":", $sys_path );
-
-    if ( $pod_name ne "start-glassfish") # Why is that again? 
-    {
-	for my $sys_path_dir (@sys_path_dirs) {
-	    if ( -x $sys_path_dir . "/jq" ) {
-		$jq_exec_path = $sys_path_dir;
-		last;
-	    }
-	}
-	if ( $jq_exec_path eq "" ) {
-	    print STDERR "\nERROR: I haven't been able to find the jq command in your PATH! Please install it from http://stedolan.github.io/jq/\n";
-	    exit 1;
-
-	}
-    }
-
-}
-
-
-# 2. INTERACTIVE CONFIG SECTION: 
-
-print "\nWelcome to the Dataverse installer.\n";
-unless ($postgresonly) {
-    print "You will be guided through the process of setting up a NEW\n";
-    print "instance of the dataverse application\n";
-}
-else {
-    print "You will be guided through the process of configuring your\n";
-    print "PostgreSQL database for use by the Dataverse application.\n";
-}
-
-my $yesno;
-
-unless ($noninteractive) 
-{
-    print "\nATTENTION: As of Dataverse v.4.19, we are offering a new, experimental \n";
-    print "version of the installer script, implemented in Python. It will eventually \n";
-    print "replace this script (implemented in Perl). Consult the file README_python.txt \n";
-    print "for more information on how to run it. \n";
-
-    print "\nWould you like to exit and use the new installer instead? [y/n] ";
-    $yesno = <>;
-    chop $yesno;
-
-    while ( $yesno ne "y" && $yesno ne "n" ) {
-	print "Please enter 'y' or 'n'!\n";
-	print "(or ctrl-C to exit the installer)\n";
-	$yesno = <>;
-	chop $yesno;
-    }
-
-    exit 0 if  $yesno eq "y";
-}
-
-ENTERCONFIG:
-
-print "\n";
-print "Please enter the following configuration values:\n";
-print "(hit [RETURN] to accept the default value)\n";
-print "\n";
-
-for my $ENTRY (@CONFIG_VARIABLES) 
-{
-    my $config_prompt = $CONFIG_PROMPTS{$ENTRY};
-    my $config_comment = $CONFIG_COMMENTS{$ENTRY};
-
-    if ( $config_comment eq '' ) 
-    {
-	print $config_prompt . ": ";
-	print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
-    }
-    else 
-    {
-	print $config_prompt . $config_comment;
-	print "[" . $CONFIG_DEFAULTS{$ENTRY} . "] ";
-    }
-
-    my $user_entry = "";
-
-    # ($noninteractive means the installer is being run in the non-interactive mode; it will use 
-    # the default values specified so far, without prompting the user for alternative values)\
-    unless ($noninteractive) 
-    {
-        $user_entry = <>;
-        chop $user_entry;
-
-	if ( $user_entry ne "" ) {
-	    $CONFIG_DEFAULTS{$ENTRY} = $user_entry;
-	}
-
-	# for some values, we'll try to do some validation right here, in real time:
-    
-	if ($ENTRY eq 'ADMIN_EMAIL') 
-	{
-	    $user_entry = $CONFIG_DEFAULTS{$ENTRY};
-	    my $attempts = 0; 
-	    while ($user_entry !~/[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}/) 
-	    {
-		$attempts++;
-		print "Please enter a valid email address: ";
-		$user_entry = <>;
-		chop $user_entry;
-	    }
-
-	    if ($attempts) 
-	    {
-		print "OK, looks legit.\n";
-		$CONFIG_DEFAULTS{$ENTRY} = $user_entry;
-	    }
-	}
-	elsif ($ENTRY eq 'GLASSFISH_DIRECTORY') 
-	{
-            # CHECK IF GLASSFISH DIR LOOKS OK:
-	    print "\nChecking your Glassfish installation...";
-
-	    my $g_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-
-
-	    unless ( -d $g_dir . "/glassfish/domains/domain1" ) 
-	    {
-		while ( !( -d $g_dir . "/glassfish/domains/domain1" ) ) 
-		{
-		    print "\nInvalid Glassfish directory " . $g_dir . "!\n";
-		    print "Enter the root directory of your Glassfish installation:\n";
-		    print "(Or ctrl-C to exit the installer): ";
-
-		    $g_dir = <>;
-		    chop $g_dir;
-		}
-	    }
-
-	    # verify that we can write in the Glassfish directory 
-	    # (now that we are no longer requiring to run the installer as root)
-
-	    my @g_testdirs = ( "/glassfish/domains/domain1",
-			       "/glassfish/domains/domain1/config",
-			       "/glassfish/lib");
-
-	    for my $test_dir (@g_testdirs)
-	    {
-		if (!(-w ($g_dir . $test_dir))) 
-		{
-		    print "\n";
-		    die("ERROR: " . $g_dir . $test_dir . " not writable to the user running the installer! Check permissions on Payara5 hierarchy.\n");
-		}
-	    }
-
-
-
-	    print "$g_dir looks OK!\n";
-	    $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} = $g_dir; 
-
-	}
-	elsif ($ENTRY eq 'MAIL_SERVER')
-	{
-	    my $smtp_server = "";
-	    while (! &validate_smtp_server() )
-	    {
-		print "Enter a valid SMTP (mail) server:\n";
-		print "(Or ctrl-C to exit the installer): ";
-
-		$smtp_server = <>;
-		chop $smtp_server;
-
-		$CONFIG_DEFAULTS{'MAIL_SERVER'} = $smtp_server unless $smtp_server eq ''; 
-	    }
-
-	    print "\nOK, we were able to establish connection to the SMTP server you have specified.\n";
-	    print "Please note that you *may* need to configure some extra settings before your \n";
-	    print "Dataverse can send email. Please consult the \"Mail Host Configuration & Authentication\"\n";
-	    print "section of the installation guide (http://guides.dataverse.org/en/latest/installation/installation-main.html)\n";
-	    print "for more information.\n";
-	}
-    }
-
-    print "\n";
-}
-
-# 2b. CONFIRM VALUES ENTERED:
-
-print "\nOK, please confirm what you've entered:\n\n";
-
-for my $ENTRY (@CONFIG_VARIABLES) {
-    print $CONFIG_PROMPTS{$ENTRY} . ": " . $CONFIG_DEFAULTS{$ENTRY} . "\n";
-}
-
-if ($noninteractive) {
-    $yesno = "y";
-}
-else {
-    print "\nIs this correct? [y/n] ";
-    $yesno = <>;
-    chop $yesno;
-}
-
-while ( $yesno ne "y" && $yesno ne "n" ) {
-    print "Please enter 'y' or 'n'!\n";
-    print "(or ctrl-C to exit the installer)\n";
-    $yesno = <>;
-    chop $yesno;
-}
-
-if ( $yesno eq "n" ) {
-    goto ENTERCONFIG;
-}
-
-# 3. SET UP POSTGRES USER AND DATABASE
-
-unless($pod_name eq "start-glassfish" || $pod_name eq "dataverse-glassfish-0" || $skipdatabasesetup) {
-    &setup_postgres(); 
-# (there's no return code - if anything goes wrong, the method will exit the script, with some diagnostic messages for the user)
-    print "\nOK, done.\n";
-
-    if ($postgresonly) 
-    {
-	exit 0;
-    }
-}
-
-# 5. CONFIGURE PAYARA
-
-my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-
-my $done = &setup_appserver();
-
-# Check if the App is running: 
-
-unless ((
-     my $exit_code =
-     system( $glassfish_dir . "/bin/asadmin list-applications | grep -q '^dataverse'" )
-	) == 0 )
-{
-    # If the "asadmin list-applications" has failed, it may only mean that an earlier
-    # "asadmin login" had failed, and asadmin is now failing to run without the user
-    # supplying the username and password. (And the fact that we are trying to pile the  
-    # output to grep prevents it from providing the prompts). 
-    # So before we give up, we'll try an alternative: 
-
-    unless ((
-	my $exit_code_2 =
-	system( "curl http://localhost:8080/robots.txt | grep -q '^User-agent'" )
-	    ) == 0 )
-    {
-	print STDERR "It appears that the Dataverse application is not running...\n";
-	print STDERR "Even though the \"asadmin deploy\" command had succeeded earlier.\n\n";
-	print STDERR "Aborting - sorry...\n\n";
-
-	exit 1; 
-    }
-}
-
-
-print "\nOK, the Dataverse application appears to be running...\n\n";
-
-# Run the additional setup scripts, that populate the metadata block field values, create users
-# and dataverses, etc.
-
-unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-all.sh" ) {
-    chdir("../api");
-}
-
-unless ( -d "data" && -f "setup-datasetfields.sh" && -f "setup-users.sh" && -f "setup-dvs.sh" && -f "setup-builtin-roles.sh" && -f "setup-all.sh" ) {
-    print "\nERROR: Can't find the metadata and user/dataverse setup scripts!\n";
-    print "\tAre you running the installer in the right directory?\n";
-    exit 1;
-}
-
-# if there's an admin_email set from arguments, replace the value in `dv-root.json` (called by `setup-all.sh`)
-if ($admin_email)
-{
-	print "setting contact email for root dataverse to: $admin_email\n";
-	set_root_contact_email( $admin_email );
-}
-else
-{
-	print "using default contact email for root dataverse\n";
-}
-
-for my $script ( "setup-all.sh" ) {
-    # (there's only 1 setup script to run now - it runs all the other required scripts)
-    print "Executing post-deployment setup script " . $script . "... ";
-
-    my $my_hostname = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'};
-
-    # We used to filter the supplied scripts, replacing "localhost" and the port, in 
-    # case they are running Dataverse on a different port... Now we are simply requiring
-    # that the port 8080 is still configured in domain.xml when they are running the 
-    # installer:
-    my $run_script;
-    #if ( $my_hostname ne "localhost" ) {
-    #    system( "sed 's/localhost:8080/$my_hostname/g' < " . $script . " > tmpscript.sh; chmod +x tmpscript.sh" );
-    #    $run_script = "tmpscript.sh";
-    #}
-    #else {
-    $run_script = $script;
-    #}
-
-    unless ( my $exit_code = system( "./" . $run_script . " > $run_script.$$.log 2>&1") == 0 ) 
-    {
-        print "\nERROR executing script " . $script . "!\n";
-        exit 1;
-    }
-    print "done!\n";
-}
-
-# SOME ADDITIONAL SETTINGS THAT ARE NOT TAKEN CARE OF BY THE setup-all SCRIPT 
-# NEED TO BE CONFIGURED HERE:
-
-print "Making additional configuration changes...\n\n";
-
-
-# a. Configure the Admin email in the Dataverse settings:
-
-print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail" . "\n";
-
-my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'ADMIN_EMAIL'} . " " . $API_URL . "/admin/settings/:SystemEmail"); 
-if ( $exit_code )       
-{
-    print "WARNING: failed to configure the admin email in the Dataverse settings!\n\n";
-} 
-else 
-{
-    print "OK.\n\n";
-}
-    
-# b. If this installation is going to be using a remote SOLR search engine service, configure its location in the settings:
-
-if ($CONFIG_DEFAULTS{'SOLR_LOCATION'} ne 'LOCAL')
-{
-    print "Executing " . "curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort" . "\n";
-    my $exit_code = system("curl -X PUT -d " . $CONFIG_DEFAULTS{'SOLR_LOCATION'} . " " . $API_URL . "/admin/settings/:SolrHostColonPort"); 
-    if ( $exit_code )       
-    {
-	print "WARNING: failed to configure the location of the remote SOLR service!\n\n";
-    }
-    else 
-    {
-	print "OK.\n\n";
-    }
-}
-
-
-
-chdir($cwd);
-
-print "\n\nYou should now have a running Dataverse instance at\n";
-print "  http://" . $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'} . ":8080\n\n\n";
-
-if ($WARFILE_LOCATION =~/([0-9]\.[0-9]\.[0-9])\.war$/) 
-{
-    my $version = $1;
-    print "If this is a personal development installation, we recommend that you undeploy the currently-running copy \n"; 
-    print "of the application, with the following asadmin command:\n\n";
-    print "\t" . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . '/bin/asadmin undeploy dataverse-' . $version . "\n\n";
-    print "before attempting to deploy from your development environment in NetBeans.\n\n";
-}
-    
-
-print "\nYour Dataverse has been configured to use DataCite, to register DOI global identifiers in the \n";
-print "test name space \"10.5072\" with the \"shoulder\" \"FK2\"\n";
-print "However, you have to contact DataCite (support\@datacite.org) and request a test account, before you \n";
-print "can publish datasets. Once you receive the account name and password, add them to your domain.xml,\n";
-print "as the following two JVM options:\n";
-print "\t<jvm-options>-Ddoi.username=...</jvm-options>\n";
-print "\t<jvm-options>-Ddoi.password=...</jvm-options>\n";
-print "and restart payara5\n";
-print "If this is a production Dataverse and you are planning to register datasets as \n";
-print "\"real\", non-test DOIs or Handles, consult the \"Persistent Identifiers and Publishing Datasets\"\n";
-print "section of the Installataion guide, on how to configure your Dataverse with the proper registration\n";
-print "credentials.\n\n";
-
-
-
-# (going to skip the Rserve check; it's no longer a required, or even a recommended component)
-
-exit 0;
-
-# 9. FINALLY, CHECK IF RSERVE IS RUNNING:
-print "\n\nFinally, checking if Rserve is running and accessible...\n";
-
-unless ( $CONFIG_DEFAULTS{'RSERVE_PORT'} =~ /^[0-9][0-9]*$/ ) {
-    print $CONFIG_DEFAULTS{'RSERVE_HOST'} . " does not look like a valid port number,\n";
-    print "defaulting to 6311.\n\n";
-
-    $CONFIG_DEFAULTS{'RSERVE_PORT'} = 6311;
-}
-
-my ( $rserve_iaddr, $rserve_paddr, $rserve_proto );
-
-unless ( $rserve_iaddr = inet_aton( $CONFIG_DEFAULTS{'RSERVE_HOST'} ) ) {
-    print STDERR "Could not look up $CONFIG_DEFAULTS{'RSERVE_HOST'},\n";
-    print STDERR "the host you specified as your R server.\n";
-    print STDERR "\nDVN can function without a working R server, but\n";
-    print STDERR "much of the functionality concerning running statistics\n";
-    print STDERR "and analysis on quantitative data will not be available.\n";
-    print STDERR "Please consult the Installers guide for more info.\n";
-
-    exit 0;
-}
-
-$rserve_paddr = sockaddr_in( $CONFIG_DEFAULTS{'RSERVE_PORT'}, $rserve_iaddr );
-$rserve_proto = getprotobyname('tcp');
-
-unless ( socket( SOCK, PF_INET, SOCK_STREAM, $rserve_proto )
-    && connect( SOCK, $rserve_paddr ) )
-{
-    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'RSERVE_HOST'}\n";
-    print STDERR "on port $CONFIG_DEFAULTS{'RSERVE_PORT'}, the address you provided\n";
-    print STDERR "for your R server.\n";
-    print STDERR "DVN can function without a working R server, but\n";
-    print STDERR "much of the functionality concerning running statistics\n";
-    print STDERR "and analysis on quantitative data will not be available.\n";
-    print STDERR "Please consult the \"Installing R\" section in the Installers guide\n";
-    print STDERR "for more info.\n";
-
-    exit 0;
-
-}
-
-close(SOCK);
-print "\nOK!\n";
-
-# 5. CONFIGURE PAYARA
-sub setup_appserver {
-    my $success = 1;
-    my $failure = 0;
-
-    my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-
-    print "\nProceeding with the app. server (Payara5) setup.\n";
-
-# 5a. DETERMINE HOW MUCH MEMORY TO GIVE TO GLASSFISH AS HEAP:
-
-    my $gf_heap_default = "2048m";
-    my $sys_mem_total   = 0;
-
-    if ( -e "/proc/meminfo" && open MEMINFO, "/proc/meminfo" ) {
-	# Linux
-
-	while ( my $mline = <MEMINFO> ) {
-	    if ( $mline =~ /MemTotal:[ \t]*([0-9]*) kB/ ) {
-		$sys_mem_total = $1;
-	    }
-	}
-	
-	close MEMINFO;
-
-# TODO: Figure out how to determine the amount of memory when running in Docker
-# because we're wondering if Dataverse can run in the free OpenShift Online
-# offering that only gives you 1 GB of memory. Obviously, if this is someone's
-# first impression of Dataverse, we want to to run well! What if you try to
-# ingest a large file or perform other memory-intensive operations? For more
-# context, see https://github.com/IQSS/dataverse/issues/4040#issuecomment-331282286
-	if ( -e "/sys/fs/cgroup/memory/memory.limit_in_bytes" && open CGROUPMEM, "/sys/fs/cgroup/memory/memory.limit_in_bytes" ) {
-	    print "INFO: This system has the CGROUP file /sys/fs/cgroup/memory/memory.limit_in_bytes\n";
-	    while ( my $limitline = <CGROUPMEM> ) {
-		### TODO: NO, WE ARE NOT NECESSARILY IN DOCKER!
-		###print "We must be running in Docker! Fancy!\n";
-		# The goal of this cgroup check is for
-		# "Setting the heap limit for Glassfish/Payara to 750MB"
-		# to change to some other value, based on memory available.
-		print "INFO: /sys/fs/cgroup/memory/memory.limit_in_bytes: $limitline\n";
-		my $limit_in_kb = $limitline / 1024;
-		print "INFO: CGROUP limit_in_kb =  $limit_in_kb [ignoring]\n";
-		# In openshift.json, notice how PostgreSQL and Solr have
-		# resources.limits.memory set to "256Mi".
-		# If you try to give the Dataverse/Glassfish container twice
-		# as much memory (512 MB) and allow $sys_mem_total to
-		# be set below, you should see the following:
-		# "Setting the heap limit for Glassfish to 192MB."
-		# FIXME: dataverse.war will not deploy with only 512 MB of memory.
-		# Again, the goal is 1 GB total (512MB + 256MB + 256MB) for
-		# Glassfish, PostgreSQL, and Solr to fit in the free OpenShift tier.
-		#print "setting sys_mem_total to: $limit_in_kb\n";
-		#$sys_mem_total = $limit_in_kb;
-	    }
-	    close CGROUPMEM;
-	}
-    }
-    elsif ( -x "/usr/sbin/sysctl" ) 
-    {
-	# MacOS X, probably...
-
-	$sys_mem_total = `/usr/sbin/sysctl -n hw.memsize`;
-	chop $sys_mem_total;
-	if ( $sys_mem_total > 0 ) {
-	    $sys_mem_total = int( $sys_mem_total / 1024 );
-	    # size in kb
-	}
-    }
-
-    if ( $sys_mem_total > 0 ) {
-	# setting the default heap size limit to 3/8 of the available
-	# amount of memory:
-	$gf_heap_default = ( int( $sys_mem_total / ( 8 / 3 * 1024 ) ) );
-
-	print "\nSetting the heap limit for Payara5 to " . $gf_heap_default . "MB. \n";
-	print "You may need to adjust this setting to better suit \n";
-	print "your system.\n\n";
-
-	#$gf_heap_default .= "m";
-
-    }
-    else 
-    {
-	print "\nCould not determine the amount of memory on your system.\n";
-	print "Setting the heap limit for Payara5 to 2GB. You may need \n";
-	print "to  adjust the value to better suit your system.\n\n";
-    }
-
-    push @CONFIG_VARIABLES, "DEF_MEM_SIZE";
-    $CONFIG_DEFAULTS{"DEF_MEM_SIZE"} = $gf_heap_default;
-
-# TODO:
-# is the below still the case with Payara5? 
-# if the system has more than 4GB of memory (I believe), glassfish must
-# be run with the 64 bit flag set explicitly (at least that was the case
-# with the MacOS glassfish build...). Verify, and if still the case,
-# add a check.
-
-    print "\n*********************\n";
-    print "PLEASE NOTE, SOME OF THE ASADMIN COMMANDS ARE GOING TO FAIL,\n";
-    print "FOR EXAMPLE, IF A CONFIGURATION SETTING THAT WE ARE TRYING\n";
-    print "TO CREATE ALREADY EXISTS; OR IF A JVM OPTION THAT WE ARE\n";
-    print "DELETING DOESN'T. THESE \"FAILURES\" ARE NORMAL!\n";
-    print "*********************\n\n";
-    print "When/if asadmin asks you to \"Enter admin user name\",\n";
-    print "it should be safe to hit return and accept the default\n";
-    print "(which is \"admin\").\n";
-
-    print "\nPress any key to continue...\n\n";
-    
-    unless ($noninteractive)
-    {
-	system "stty cbreak </dev/tty >/dev/tty 2>&1";
-	unless ($noninteractive) {
-	    my $key = getc(STDIN);
-	}
-	system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-    }
-	
-    print "\n";
-
-# 5b. start domain, if not running:
-    
-    my $javacheck = `java -version`;
-    my $exitcode  = $?;
-    unless ( $exitcode == 0 ) {
-	print STDERR "$javacheck\n" if $javacheck;
-	print STDERR "Do you have java installed?\n";
-	exit 1;
-    }
-    my $DOMAIN = "domain1";
-    my $DOMAIN_DOWN =
-	`$CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}/bin/asadmin list-domains | grep "$DOMAIN " | grep "not running"`;
-    print STDERR $DOMAIN_DOWN . "\n";
-    if ($DOMAIN_DOWN) {
-	print "Trying to start domain up...\n";
-	if ( $current_user eq $CONFIG_DEFAULTS{'GLASSFISH_USER'} ){
-		system( $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" );
-	}
-	else
-	{
-		system( "sudo -u $CONFIG_DEFAULTS{'GLASSFISH_USER'} " . $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/bin/asadmin start-domain domain1" );
-	}
-	# TODO: (?) - retest that the domain is running now? 
-    }
-    else
-    {
-	print "domain appears to be up...\n";
-    }
-
-# 5c. create asadmin login, so that the user doesn't have to enter
-# the username and password for every asadmin command, if
-# access to :4848 is password-protected:
-
-    system( $glassfish_dir. "/bin/asadmin login" );
-    
-# 5d. configure glassfish using ASADMIN commands:
-  
-    $success = &run_asadmin_script();
-
-# CHECK EXIT STATUS, BARF IF SETUP SCRIPT FAILED:
-
-    unless ($success) {
-	print "\nERROR! Failed to configure Payara5 domain!\n";
-	print "(see the error messages above - if any)\n";
-	print "Aborting...\n";
-	
-	exit 1;
-    }
-    
-# 5e. Additional config files:
-    
-    my $JHOVE_CONFIG = "jhove.conf";
-    my $JHOVE_CONF_SCHEMA = "jhoveConfig.xsd";
-    
-
-    my $JHOVE_CONFIG_DIST = $JHOVE_CONFIG; 
-    my $JHOVE_CONF_SCHEMA_DIST = $JHOVE_CONF_SCHEMA; 
-    
-# (if the installer is being run NOT as part of a distribution zipped bundle, but
-# from inside the source tree - adjust the locations of the jhove config files:
-
-    unless ( -f $JHOVE_CONFIG ) {
-	$JHOVE_CONFIG_DIST = "../../conf/jhove/jhove.conf";
-	$JHOVE_CONF_SCHEMA_DIST = "../../conf/jhove/jhoveConfig.xsd";
-    }
-
-# but if we can't find the files in either location, it must mean
-# that they are not running the script in the correct directory - so 
-# nothing else left for us to do but give up:
-
-    unless ( -f $JHOVE_CONFIG_DIST && -f $JHOVE_CONF_SCHEMA_DIST ) {
-	print "\nERROR! JHOVE configuration files not found in the config dir!\n";
-	print "(are you running the installer in the right directory?\n";
-	print "Aborting...\n";
-	exit 1;
-    }
-
-    print "\nCopying additional configuration files... ";
-
-    #system( "/bin/cp -f " . $JHOVE_CONF_SCHEMA_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" );
-    my $jhove_success = copy ($JHOVE_CONF_SCHEMA_DIST, $glassfish_dir . "/glassfish/domains/domain1/config");
-    unless ($jhove_success) 
-    {
-	print "\n*********************\n";
-	print "ERROR: failed to copy jhove config file into " . $glassfish_dir . "/glassfish/domains/domain1/config - do you have write permission in that directory?";
-	exit 1;
-    }
-
-# The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...)
-# - so it may need to be readjusted here: 
-
-    if ( $glassfish_dir ne "/usr/local/payara5" )
-    {
-	system( "sed 's:/usr/local/payara5:$glassfish_dir:g' < " . $JHOVE_CONFIG_DIST . " > " . $glassfish_dir . "/glassfish/domains/domain1/config/" . $JHOVE_CONFIG);
-    }
-    else 
-    {
-	system( "/bin/cp -f " . $JHOVE_CONFIG_DIST . " " . $glassfish_dir . "/glassfish/domains/domain1/config" );
-    }
-
-    print "done!\n";
-    
-# 5f. check if payara is running:
-# TODO.
-    
-# 5g. DEPLOY THE APPLICATION:
-
-    print "\nAttempting to deploy the application.\n";
-    print "Command line: " . $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION . "\n";
-    unless ((
-	my $exit_code =
-	system( $glassfish_dir . "/bin/asadmin deploy " . $WARFILE_LOCATION )
-	    ) == 0 )
-    {
-	print STDERR "Failed to deploy the application! WAR file: " . $WARFILE_LOCATION . ".\n";
-	print STDERR "(exit code: " . $exit_code . ")\n";
-	print STDERR "Aborting.\n";
-	exit 1;
-    }
-
-
-    print "Finished configuring Payara and deploying the dataverse application.  \n";
-
-
-    return $success;
-}
-
-sub run_asadmin_script {
-    my $success = 1;
-    my $failure = 0;
-
-    # We are going to run a standalone shell script with a bunch of asadmin
-    # commands to set up all the Payara components for the application.
-    # All the parameters must be passed to that script as environmental
-    # variables:
-
-    $ENV{'GLASSFISH_ROOT'}   = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'};
-    $ENV{'GLASSFISH_DOMAIN'} = "domain1";
-    $ENV{'ASADMIN_OPTS'}     = "";
-    $ENV{'MEM_HEAP_SIZE'}    = $CONFIG_DEFAULTS{'DEF_MEM_SIZE'};
-
-    $ENV{'DB_PORT'} = $CONFIG_DEFAULTS{'POSTGRES_PORT'};
-    $ENV{'DB_HOST'} = $CONFIG_DEFAULTS{'POSTGRES_SERVER'};
-    $ENV{'DB_NAME'} = $CONFIG_DEFAULTS{'POSTGRES_DATABASE'};
-    $ENV{'DB_USER'} = $CONFIG_DEFAULTS{'POSTGRES_USER'};
-    $ENV{'DB_PASS'} = $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'};
-
-    $ENV{'RSERVE_HOST'} = $CONFIG_DEFAULTS{'RSERVE_HOST'};
-    $ENV{'RSERVE_PORT'} = $CONFIG_DEFAULTS{'RSERVE_PORT'};
-    $ENV{'RSERVE_USER'} = $CONFIG_DEFAULTS{'RSERVE_USER'};
-    $ENV{'RSERVE_PASS'} = $CONFIG_DEFAULTS{'RSERVE_PASSWORD'};
-    $ENV{'DOI_BASEURL'} = $CONFIG_DEFAULTS{'DOI_BASEURL'};
-    $ENV{'DOI_USERNAME'} = $CONFIG_DEFAULTS{'DOI_USERNAME'};
-    $ENV{'DOI_PASSWORD'} = $CONFIG_DEFAULTS{'DOI_PASSWORD'};
-    $ENV{'DOI_DATACITERESTAPIURL'} = $CONFIG_DEFAULTS{'DOI_DATACITERESTAPIURL'};
-    
-    $ENV{'HOST_ADDRESS'} = $CONFIG_DEFAULTS{'HOST_DNS_ADDRESS'};
-
-    my ($mail_server_host, $mail_server_port) = split (":", $CONFIG_DEFAULTS{'MAIL_SERVER'});
-
-    $ENV{'SMTP_SERVER'}  = $mail_server_host;
-
-    if ($mail_server_port) 
-    {
-	$ENV{'SMTP_SERVER_PORT'} = $mail_server_port; 
-    }
-
-    $ENV{'FILES_DIR'} =
-      $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'} . "/glassfish/domains/" . $ENV{'GLASSFISH_DOMAIN'} . "/files";
-    
-    system("./as-setup.sh");
-
-    if ($?) {
-        return $failure;
-    }
-    return $success;
-}
-
-sub create_pg_hash {
-    my $pg_username = shift @_;
-    my $pg_password = shift @_;
-
-    my $encode_line = $pg_password . $pg_username;
-
-    # for Redhat:
-
-    ##print STDERR "executing /bin/echo -n $encode_line | md5sum\n";
-
-    my $hash;
-    if ( $WORKING_OS eq "MacOSX" ) {
-        $hash = `/bin/echo -n $encode_line | md5`;
-    }
-    else {
-        $hash = `/bin/echo -n $encode_line | md5sum`;
-    }
-
-    chop $hash;
-
-    $hash =~ s/  \-$//;
-
-    if ( ( length($hash) != 32 ) || ( $hash !~ /^[0-9a-f]*$/ ) ) {
-        print STDERR "Failed to generate a MD5-encrypted password hash for the Postgres database.\n";
-        exit 1;
-    }
-
-    return $hash;
-}
-
-sub validate_smtp_server {
-    my ( $mail_server_iaddr, $mail_server__paddr, $mail_server_proto, $mail_server_status );
-
-    $mail_server_status = 1;
-
-    my $userentry = $CONFIG_DEFAULTS{'MAIL_SERVER'};
-    my ($testserver, $testport) = split (":", $userentry);
-
-    unless ( $mail_server_iaddr = inet_aton( $testserver ) ) {
-	print STDERR "Could not look up $testserver,\n";
-	print STDERR "the host you specified as your mail server\n";
-	$mail_server_status = 0;
-    }
-
-    if ($mail_server_status) {
-	$testport = 25 unless $testport; 
-	my $mail_server_paddr = sockaddr_in( $testport, $mail_server_iaddr );
-	$mail_server_proto = getprotobyname('tcp');
-
-       unless ( socket( SOCK, PF_INET, SOCK_STREAM, $mail_server_proto )
-		 && connect( SOCK, $mail_server_paddr ) )
-	{
-	    print STDERR "Could not establish connection to $CONFIG_DEFAULTS{'MAIL_SERVER'},\n";
-	    print STDERR "the address you provided for your Mail server.\n";
-	    print STDERR "Please select a valid mail server, and try again.\n\n";
-
-	    $mail_server_status = 0;
-	}
-
-	close(SOCK);
-    }
-
-    return $mail_server_status; 
-}
-
-# support function for set_root_contact_email
-sub search_replace_file
-{
-        my ($infile, $pattern, $replacement, $outfile) = @_;
-        open (my $inp, $infile);
-        local $/ = undef;
-        my $txt = <$inp>;
-        close $inp;
-        $txt =~s/$pattern/$replacement/g;
-        open (my $opf, '>:encoding(UTF-8)', $outfile);
-        print $opf $txt;
-        close $opf;
-        return;
-}
-# set the email address for the default `dataverseAdmin` account
-sub set_root_contact_email
-{
-        my ($contact_email) = @_;
-        my $config_json = "data/user-admin.json";
-        search_replace_file($config_json,"\"email\":\"dataverse\@mailinator.com\"","\"email\":\"$contact_email\"",$config_json);
-        return;
-}
-
-
-sub setup_postgres {
-    my $pg_local_connection = 0;
-    my $pg_major_version = 0;
-    my $pg_minor_version = 0;
-
-
-# We'll need to do a few things as the Postgres admin user; 
-# We'll assume the name of the admin user is "postgres". 
-    my $POSTGRES_ADMIN_USER = "postgres";
-
-
-
-##Handling container env
-
-    if ($pod_name eq "start-glassfish")
-    {
-	# When we are in this openshift "start-glassfish" pod, we get all the 
-	# Postgres configuration from the environmental variables. 
-	print "Init container starting \n";
-	$CONFIG_DEFAULTS{'POSTGRES_SERVER'} = $ENV{"POSTGRES_SERVER"} . "." .  $ENV{"POSTGRES_SERVICE_HOST"};
-	$CONFIG_DEFAULTS{'POSTGRES_DATABASE'} = $ENV{"POSTGRES_DATABASE"};
-	$CONFIG_DEFAULTS{'POSTGRES_USER'} = $ENV{"POSTGRES_USER"};
-	$CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} = $ENV{"POSTGRES_ADMIN_PASSWORD"};
-	# there was a weird case of the postgres admin password option spelled differently in openshift.json 
-	# - as "POSTGRESQL_ADMIN_PASSWORD"; I'm going to change it in openshift.json - but I'm leaving this
-	# next line here, just in case: (L.A. -- Sept. 2018)
-	$CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} = $ENV{'POSTGRESQL_ADMIN_PASSWORD'};
-	$CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} = $ENV{"POSTGRES_PASSWORD"};
-    }
-
-    if ( $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq 'localhost' || $CONFIG_DEFAULTS{'POSTGRES_SERVER'} eq '127.0.0.1' ) 
-    {
-	$pg_local_connection = 1;
-    } 
-#    elsif ($postgresonly) 
-#    {
-#	print "In the --pg_only mode the script can only be run LOCALLY,\n";
-#	print "i.e., on the server where PostgresQL is running, with the\n";
-#	print "Postgres server address as localhost - \"127.0.0.1\".\n";
-#	exit 1;
-#    }
-
-#If it is executing in a container, proceed easy with this all-in-one block
-
-
-
-
-# 3b. LOCATE THE psql EXECUTABLE:
-
-    if ( $pod_name eq "start-glassfish"){
-        $psql_exec_path = "/usr/bin"    
-    } 
-    else 
-    {
-	my $sys_path = $ENV{'PATH'};
-	my @sys_path_dirs = split( ":", $sys_path );
-
-	for my $sys_path_dir (@sys_path_dirs) {
-	    
-	    if ( -x $sys_path_dir . "/psql" ) {
-		$psql_exec_path = $sys_path_dir;
-
-		last;
-	    }
-	}
-    }
-
-    my $psql_major_version = 0;
-    my $psql_minor_version = 0;
-
-# 3c. IF PSQL WAS FOUND IN THE PATH, CHECK ITS VERSION:
-
-    unless ( $psql_exec_path eq "" ) {
-	open( PSQLOUT, $psql_exec_path . "/psql --version|" );
-
-	my $psql_version_line = <PSQLOUT>;
-	chop $psql_version_line;
-	close PSQLOUT;
-    
-	my ( $postgresName, $postgresNameLong, $postgresVersion ) = split( " ", $psql_version_line );
-
-	unless ( $postgresName eq "psql" && $postgresVersion =~ /^[0-9][0-9\.]*$/ ) {
-	    print STDERR "\nWARNING: Unexpected output from psql command!\n";
-	}
-	else 
-	{
-	    my (@psql_version_tokens) = split( '\.', $postgresVersion );
-
-	    print "\n\nFound Postgres psql command, version $postgresVersion.\n\n";
-
-	    $psql_major_version = $psql_version_tokens[0];
-	    $psql_minor_version = $psql_version_tokens[1];
-
-	    $pg_major_version = $psql_major_version;
-	    $pg_minor_version = $psql_minor_version;
-
-	}
-    }
-
-# a frequent problem with MacOSX is that the copy of psql found in the PATH
-# belongs to the older version of PostgresQL supplied with the OS, which happens
-# to be incompatible with the newer builds from the Postgres project; which are
-# recommended to be used with Dataverse. So if this is a MacOSX box, we'll
-# check what other versions of PG are available, and select the highest version
-# we can find:
-
-    if ( $WORKING_OS eq "MacOSX" ) {
-	my $macos_pg_major_version = 0;
-	my $macos_pg_minor_version = 0;
-	
-	for $macos_pg_minor_version ( "9", "8", "7", "6", "5", "4", "3", "2", "1", "0" ) {
-	    if ( -x "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin/psql" ) {
-		$macos_pg_major_version = 9;
-		if (   ( $macos_pg_major_version > $psql_major_version )
-		       || ( $macos_pg_minor_version >= $psql_minor_version ) )
-		{
-		    $psql_exec_path        = "/Library/PostgreSQL/9." . $macos_pg_minor_version . "/bin";
-		    $pg_major_version = $macos_pg_major_version;
-		    $pg_minor_version = $macos_pg_minor_version;
-		}
-		last;
-	    }
-	}
-    }
-
-    my $psql_admin_exec = "";
-
-    if ( $psql_exec_path eq "" ) 
-    { 
-	if ( $pg_local_connection || $noninteractive) 
-	{
-	    print STDERR "\nERROR: I haven't been able to find the psql command in your PATH!\n";
-	    print STDERR "Please make sure PostgresQL is properly installed; if necessary, add\n";
-	    print STDERR "the location of psql to the PATH, then try again.\n\n";
-	    
-	    exit 1;
-	}
-	else 
-	{
-	    print "WARNING: I haven't been able to find the psql command in your PATH!\n";
-	    print "But since we are configuring a Dataverse instance to use a remote Postgres server,\n";
-	    print "we can still set up the database by running a setup script on that remote server\n";
-	    print "(see below for instructions).\n";
-	    
-	}
-    } else {
-
-	print "(Using psql version " . $pg_major_version . "." . $pg_minor_version . ": " . $psql_exec_path . "/psql)\n";
-
-
-	$psql_admin_exec = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_ADMIN_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec_path; 
-	$psql_exec_path = "PGPASSWORD=" . $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} . "; export PGPASSWORD; " . $psql_exec_path; 
-
-	print "Checking if we can talk to Postgres as the admin user...\n";
-    }
-    
-# 3d. CHECK IF WE CAN TALK TO POSTGRES AS THE ADMIN:
-
-    if ($psql_exec_path eq "" || system( $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) 
-    {
-	# No, we can't. :(
-	if ($pg_local_connection || $noninteractive) 
-	{
-	    # If Postgres is running locally, this is a fatal condition. 
-	    # We'll give them some (potentially) helpful pointers and exit.
-
-	    print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1) \n";
-	    print "Nope, I haven't been able to connect to the local instance of PostgresQL as the admin user.\n";
-	    print "\nIs postgresql running? \n";
-	    print "   On a RedHat-like system, you can check the status of the daemon with\n\n";
-	    print "      service postgresql start\n\n";
-	    print "   On MacOSX, use Applications -> PostgresQL -> Start Server.\n";
-	    print "   (or, if there's no \"Start Server\" item in your PostgresQL folder, \n";
-	    print "   simply restart your MacOSX system!)\n";
-	    print "\nAlso, please make sure that the daemon is listening to network connections!\n";
-	    print "   - at least on the localhost interface. (See \"Installing Postgres\" section\n";
-	    print "   of the installation manual).\n";
-	    print "\nFinally, did you supply the correct admin password?\n";
-	    print "   Don't know the admin password for your Postgres installation?\n";
-	    print "   - then simply set the access level to \"trust\" temporarily (for localhost only!)\n";
-	    print "   in your pg_hba.conf file. Again, please consult the \n";
-	    print "   installation manual).\n";
-	    exit 1;
-	}
-	else 
-	{
-	    # If we are configuring the Dataverse instance to use a Postgres server 
-	    # running on a remote host, it is possible to configure the database
-	    # without opening remote access for the admin user. They will simply 
-	    # have to run this script in the "postgres-only" mode on that server, locally, 
-	    # then resume the installation here: 
-	    print "(Tried executing: " . $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -c 'SELECT * FROM pg_roles' > /dev/null 2>&1)\n\n";
-	    print "Haven't been able to connect to the remote Postgres server as the admin user.\n";
-	    print "(Or you simply don't have psql installed on this server)\n";
-	    print "It IS possible to configure a database for your Dataverse on a remote server,\n";
-	    print "without having admin access to that remote Postgres installation.\n\n";
-	    print "In order to do that, please copy the installer (the entire package) to the server\n";
-	    print "where PostgresQL is running and run the installer with the \"--pg_only\" option:\n\n";
-	    print "   ./install --pg_only\n\n";
-
-	    print "Press any key to continue the installation process once that has been\n";
-	    print "done. Or press ctrl-C to exit the installer.\n\n";
-	    
-	    system "stty cbreak </dev/tty >/dev/tty 2>&1";
-	    my $key = getc(STDIN);
-	    system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-	    print "\n";
-	}
-    }
-    else 
-    {
-	print "Yes, we can!\n";
-
-	# ok, we can proceed with configuring things...
-
-	print "\nConfiguring Postgres Database:\n";
-
-	# 4c. CHECK IF THIS DB ALREADY EXISTS:
-    
-	my $psql_command_dbcheck =
-	    $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -c '' -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ">/dev/null 2>&1";
-
-	if ( ( my $exitcode = system($psql_command_dbcheck) ) == 0 ) 
-	{    
-	    if ($force) 
-	    {
-		print "WARNING! Database "
-		    . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'}
-		. " already exists but --force given... continuing.\n";
-	    } 
-	    else 
-	    {
-		print "WARNING! Database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " already exists!\n";
-
-		if ($noninteractive)
-		{
-		    exit 1;
-		}
-		else 
-		{
-		    print "\nPress any key to continue, or ctrl-C to exit the installer...\n\n";
-        
-		    system "stty cbreak </dev/tty >/dev/tty 2>&1";
-		    my $key = getc(STDIN);
-		    system "stty -cbreak </dev/tty >/dev/tty 2>&1";
-		    print "\n";
-
-		}
-	    }
-	}
-
-	# 3e. CHECK IF THIS USER ALREADY EXISTS:
-    
-	my $psql_command_rolecheck =
-	    $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -c '' -d postgres " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " >/dev/null 2>&1";
-	my $exitcode; 
-
-	if ( ( $exitcode = system($psql_command_rolecheck) ) == 0 ) 
-	{
-	    print "User (role) " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " already exists;\n";
-	    print "Proceeding.";
-	}
-	else 
-	{
-	    # 3f. CREATE DVN DB USER:
-	    
-	    print "\nCreating Postgres user (role) for the DVN:\n";
-    
-	    open TMPCMD, ">/tmp/pgcmd.$$.tmp";
-
-	    # with md5-encrypted password:
-	    my $pg_password_md5 =
-		&create_pg_hash( $CONFIG_DEFAULTS{'POSTGRES_USER'}, $CONFIG_DEFAULTS{'POSTGRES_PASSWORD'} );
-	    my $sql_command =
-		"CREATE ROLE \""
-		. $CONFIG_DEFAULTS{'POSTGRES_USER'}
-	    . "\" PASSWORD 'md5"
-		. $pg_password_md5
-		. "' NOSUPERUSER CREATEDB CREATEROLE INHERIT LOGIN";
-
-	    print TMPCMD $sql_command;
-	    close TMPCMD;
-        
-	    my $psql_commandline = $psql_admin_exec . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER . " -d postgres -f /tmp/pgcmd.$$.tmp >/dev/null 2>&1";
-
-	    my $out      = qx($psql_commandline 2>&1);
-	    $exitcode = $?;
-	    unless ( $exitcode == 0 ) 
-	    {
-		print STDERR "Could not create the DVN Postgres user role!\n";
-		print STDERR "(SQL: " . $sql_command . ")\n";
-		print STDERR "(psql exit code: " . $exitcode . ")\n";
-		print STDERR "(STDERR and STDOUT was: " . $out . ")\n";
-		exit 1;
-	    }
-
-	    unlink "/tmp/pgcmd.$$.tmp";
-	    print "done.\n";
-	}
-    
-	# 3g. CREATE DVN DB:
-    
-	print "\nCreating Postgres database:\n";
-    
-	my $psql_command =
-	    $psql_admin_exec
-	    . "/createdb -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $POSTGRES_ADMIN_USER ." "
-	    . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " --owner="
-	    . $CONFIG_DEFAULTS{'POSTGRES_USER'};
-
-	my $out = qx($psql_command 2>&1);
-        $exitcode = $?;
-	unless ( $exitcode == 0 ) 
-	{
-	    print STDERR "Could not create Postgres database for the Dataverse app!\n";
-	    print STDERR "(command: " . $psql_command . ")\n";
-	    print STDERR "(psql exit code: " . $exitcode . ")\n";
-	    print STDERR "(STDOUT and STDERR: " . $out . ")\n";
-	    if ($force) 
-	    {
-		print STDERR "\ncalled with --force, continuing\n";
-	    }
-	    else 
-	    {
-		print STDERR "\naborting the installation (sorry!)\n\n";
-		exit 1;
-	    }
-	}
-    }
-
-# Whether the user and the database were created locally or remotely, we'll now 
-# verify that we can talk to that database, with the credentials of the database
-# user that we want the Dataverse application to be using: 
-
-    if ( $psql_exec_path ne "" && system( $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -c 'SELECT * FROM pg_roles' > /dev/null 2>&1" ) ) 
-    {
-	print STDERR "Oops, haven't been able to connect to the database " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . ",\n";
-	print STDERR "running on " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} . ", as user " . $CONFIG_DEFAULTS{'POSTGRES_USER'} . ".\n\n";
-	print STDERR "Aborting the installation (sorry!)\n";
-	exit 1; 
-    }
-}
-
-sub read_config_defaults {
-    my $config_file = shift @_;
-
-    unless ( -f $config_file ) 
-    {
-	print STDERR "Can't find the config file " . $config_file . "!\n";
-	exit 1;
-    }
-
-    open CF, $config_file || die "Can't open config file " . $config_file . " for reading.\n"; 
-
-    while (<CF>)
-    {
-	chop; 
-
-	if ( $_ =~/^[A-Z]/ && $_ =~/ *= */ ) 
-	{
-	    my ($name, $value) = split(/ *= */, $_, 2);
-	    $CONFIG_DEFAULTS{$name} = $value;
-	}
-    }
-    close CF;
-}
-
-sub read_interactive_config_values {
-    my $config_file = shift @_;
-
-    unless ( -f $config_file ) 
-    {
-	print STDERR "Can't find the config file " . $config_file . "!\n";
-	exit 1;
-    }
-
-    open CF, $config_file || die "Can't open config file " . $config_file . " for reading.\n"; 
-
-    my $mode = "";
-
-    while (<CF>)
-    {
-	chop; 
-
-	if ( $_ eq "[prompts]" || $_ eq "[comments]" )
-	{
-	    $mode = $_; 
-	}
-
-	if ( $_ =~/^[A-Z]/ && $_ =~/ *= */ ) 
-	{
-	    my ($name, $value) = split(/ *= */, $_, 2);
-	    
-	    if ( $mode eq "[prompts]" ) 
-	    {
-		$CONFIG_PROMPTS{$name} = $value;
-	    }
-	    elsif ( $mode eq "[comments]" )
-	    {
-		$value =~s/\\n/\n/g;
-		$CONFIG_COMMENTS{$name} = $value;
-	    }
-	}
-    }
-    close CF;
-}
diff --git a/scripts/installer/install.py b/scripts/installer/install.py
index ea1a69db6a7..5a7b9f75696 100644
--- a/scripts/installer/install.py
+++ b/scripts/installer/install.py
@@ -252,8 +252,8 @@
    # 1d. check java version
    java_version = subprocess.check_output(["java", "-version"], stderr=subprocess.STDOUT).decode()
    print("Found java version "+java_version)
-   if not re.search('(1.8|11)', java_version):
-      sys.exit("Dataverse requires OpenJDK 1.8 or 11. Please make sure it's in your PATH, and try again.")
+   if not re.search('(17)', java_version):
+      sys.exit("Dataverse requires OpenJDK 17. Please make sure it's in your PATH, and try again.")
 
    # 1e. check if the setup scripts - setup-all.sh, are available as well, maybe?
    # @todo (?)
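
As a quick manual sanity check mirroring the installer's new Java test above (a sketch only; it reuses the same loose substring match on the `java -version` banner rather than a strict version parse):

```bash
# Mirror of the installer's check: the "java -version" banner must mention 17.
# Loose substring match, same as the re.search('(17)', ...) above.
if java -version 2>&1 | grep -q '17'; then
    echo "OK: Java 17 appears to be in the PATH"
else
    echo "Dataverse requires OpenJDK 17; please adjust your PATH" >&2
fi
```
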
@@ -314,7 +314,7 @@
                   gfDir = config.get('glassfish', 'GLASSFISH_DIRECTORY')
                   while not test_appserver_directory(gfDir):
                      print("\nInvalid Payara directory!")
-                     gfDir = read_user_input("Enter the root directory of your Payara5 installation:\n(Or ctrl-C to exit the installer): ")
+                     gfDir = read_user_input("Enter the root directory of your Payara installation:\n(Or ctrl-C to exit the installer): ")
                   config.set('glassfish', 'GLASSFISH_DIRECTORY', gfDir)
                elif option == "mail_server":
                   mailServer = config.get('system', 'MAIL_SERVER')
@@ -380,12 +380,13 @@
       print("Can't connect to PostgresQL as the admin user.\n")
       sys.exit("Is the server running, have you adjusted pg_hba.conf, etc?")
 
-   # 3b. get the Postgres version (do we need it still?)
+   # 3b. get the Postgres version for new permissions model in versions 15+
    try:
-      pg_full_version = conn.server_version
-      print("PostgresQL version: "+str(pg_full_version))
+      pg_full_version = str(conn.server_version)
+      pg_major_version = pg_full_version[0:2]
+      print("PostgreSQL version: "+pg_major_version)
    except:
-      print("Warning: Couldn't determine PostgresQL version.")
+      print("Warning: Couldn't determine PostgreSQL version.")
    conn.close()
 
    # 3c. create role:
@@ -410,6 +411,8 @@
       else:
          sys.exit("Couldn't create database or database already exists.\n")
 
+   # 3e. set permissions:
+
    conn_cmd = "GRANT ALL PRIVILEGES on DATABASE "+pgDb+" to "+pgUser+";"
    try:
       cur.execute(conn_cmd)
@@ -418,6 +421,19 @@
    cur.close()
    conn.close()
 
+   if int(pg_major_version) >= 15:
+      conn_cmd = "GRANT CREATE ON SCHEMA public TO "+pgUser+";"
+      print("PostgreSQL 15 or higher detected. Running " + conn_cmd)
+      try:
+         cur.execute(conn_cmd)
+      except:
+         if force:
+            print("WARNING: failed to grant permissions on schema public - continuing, since the --force option was specified")
+         else:
+            sys.exit("Couldn't grant privileges on schema public to "+pgUser)
+      cur.close()
+      conn.close()
+
    print("Database and role created!")
    if pgOnly:
       print("postgres-only setup complete.")
@@ -511,12 +527,12 @@
 try: 
    copy2(jhoveConfigSchemaDist, gfConfigDir)
    # The JHOVE conf file has an absolute PATH of the JHOVE config schema file (uh, yeah...)
-   # and may need to be adjusted, if Payara is installed anywhere other than /usr/local/payara5:
-   if gfDir == "/usr/local/payara5":
+   # and may need to be adjusted, if Payara is installed anywhere other than /usr/local/payara6:
+   if gfDir == "/usr/local/payara6":
       copy2(jhoveConfigDist, gfConfigDir)
    else:
-      # use sed to replace /usr/local/payara5 in the distribution copy with the real gfDir:
-      sedCommand = "sed 's:/usr/local/payara5:"+gfDir+":g' < " + jhoveConfigDist + " > " + gfConfigDir + "/" + jhoveConfig
+      # use sed to replace /usr/local/payara6 in the distribution copy with the real gfDir:
+      sedCommand = "sed 's:/usr/local/payara6:"+gfDir+":g' < " + jhoveConfigDist + " > " + gfConfigDir + "/" + jhoveConfig
       subprocess.call(sedCommand, shell=True)
 
    print("done.")
@@ -578,8 +594,8 @@
 print("However, you have to contact DataCite (support\@datacite.org) and request a test account, before you ")
 print("can publish datasets. Once you receive the account name and password, add them to your domain.xml,")
 print("as the following two JVM options:")
-print("\t<jvm-options>-Ddoi.username=...</jvm-options>")
-print("\t<jvm-options>-Ddoi.password=...</jvm-options>")
+print("\t<jvm-options>-Ddataverse.pid.datacite.username=...</jvm-options>")
+print("\t<jvm-options>-Ddataverse.pid.datacite.password=...</jvm-options>")
 print("and restart payara")
 print("If this is a production Dataverse and you are planning to register datasets as ")
 print("\"real\", non-test DOIs or Handles, consult the \"Persistent Identifiers and Publishing Datasets\"")
diff --git a/scripts/installer/installAppServer.py b/scripts/installer/installAppServer.py
index 8b719ac09d1..698f5ba9a58 100644
--- a/scripts/installer/installAppServer.py
+++ b/scripts/installer/installAppServer.py
@@ -3,7 +3,7 @@
 
 def runAsadminScript(config):
    # We are going to run a standalone shell script with a bunch of asadmin                                      
-   # commands to set up all the app. server (payara5) components for the application.                                       
+   # commands to set up all the app. server (payara6) components for the application.
    # All the parameters must be passed to that script as environmental                                          
    # variables:
    os.environ['GLASSFISH_DOMAIN'] = "domain1";
diff --git a/scripts/installer/installUtils.py b/scripts/installer/installUtils.py
index 7cc368de5f8..ff5e6eb708d 100644
--- a/scripts/installer/installUtils.py
+++ b/scripts/installer/installUtils.py
@@ -57,7 +57,7 @@ def test_appserver_directory(directory):
 
         #print("version: major: "+str(major_version)+", minor: "+str(minor_version))
 
-        if major_version != 5 or minor_version < 201:
+        if major_version != 6 or minor_version < 2023:
             return False
         return True
 
diff --git a/scripts/installer/interactive.config b/scripts/installer/interactive.config
index 86ea926fe5d..ef8110c554f 100644
--- a/scripts/installer/interactive.config
+++ b/scripts/installer/interactive.config
@@ -26,7 +26,7 @@ DOI_BASEURL = Datacite URL
 DOI_DATACITERESTAPIURL = Datacite REST API URL
 [comments]
 HOST_DNS_ADDRESS = :(enter numeric IP address, if FQDN is unavailable)
-GLASSFISH_USER = :This user will be running the App. Server (Payara5) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n:
+GLASSFISH_USER = :This user will be running the App. Server (Payara) service on your system.\n - If this is a dev. environment, this should be your own username; \n - In production, we suggest you create the account "dataverse", or use any other unprivileged user account\n:
 GLASSFISH_DIRECTORY = 
 GLASSFISH_REQUEST_TIMEOUT = :\n Defaults to 1800 seconds (30 minutes)
 ADMIN_EMAIL = :\n(please enter a valid email address!) 
diff --git a/scripts/search/tests/data/dataset-finch1-nolicense.json b/scripts/search/tests/data/dataset-finch1-nolicense.json
new file mode 100644
index 00000000000..ec0856a2aa3
--- /dev/null
+++ b/scripts/search/tests/data/dataset-finch1-nolicense.json
@@ -0,0 +1,77 @@
+{
+  "datasetVersion": {
+    "metadataBlocks": {
+      "citation": {
+        "fields": [
+          {
+            "value": "Darwin's Finches",
+            "typeClass": "primitive",
+            "multiple": false,
+            "typeName": "title"
+          },
+          {
+            "value": [
+              {
+                "authorName": {
+                  "value": "Finch, Fiona",
+                  "typeClass": "primitive",
+                  "multiple": false,
+                  "typeName": "authorName"
+                },
+                "authorAffiliation": {
+                  "value": "Birds Inc.",
+                  "typeClass": "primitive",
+                  "multiple": false,
+                  "typeName": "authorAffiliation"
+                }
+              }
+            ],
+            "typeClass": "compound",
+            "multiple": true,
+            "typeName": "author"
+          },
+          {
+            "value": [ 
+                { "datasetContactEmail" : {
+                    "typeClass": "primitive",
+                    "multiple": false,
+                    "typeName": "datasetContactEmail",
+                    "value" : "finch@mailinator.com"
+                },
+                "datasetContactName" : {
+                    "typeClass": "primitive",
+                    "multiple": false,
+                    "typeName": "datasetContactName",
+                    "value": "Finch, Fiona"
+                }
+            }],
+            "typeClass": "compound",
+            "multiple": true,
+            "typeName": "datasetContact"
+          },
+          {
+            "value": [ {
+               "dsDescriptionValue":{
+                "value":   "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.",
+                "multiple":false,
+               "typeClass": "primitive",
+               "typeName": "dsDescriptionValue"
+            }}],
+            "typeClass": "compound",
+            "multiple": true,
+            "typeName": "dsDescription"
+          },
+          {
+            "value": [
+              "Medicine, Health and Life Sciences"
+            ],
+            "typeClass": "controlledVocabulary",
+            "multiple": true,
+            "typeName": "subject"
+          }
+        ],
+        "displayName": "Citation Metadata"
+      }
+    }
+  }
+}
diff --git a/scripts/search/tests/data/dataset-finch1.json b/scripts/search/tests/data/dataset-finch1.json
index ec0856a2aa3..433ea758711 100644
--- a/scripts/search/tests/data/dataset-finch1.json
+++ b/scripts/search/tests/data/dataset-finch1.json
@@ -1,5 +1,9 @@
 {
   "datasetVersion": {
+    "license": {
+      "name": "CC0 1.0",
+      "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+    },
     "metadataBlocks": {
       "citation": {
         "fields": [
diff --git a/scripts/search/tests/data/dataset-finch2.json b/scripts/search/tests/data/dataset-finch2.json
index d20f835b629..446df54676a 100644
--- a/scripts/search/tests/data/dataset-finch2.json
+++ b/scripts/search/tests/data/dataset-finch2.json
@@ -1,5 +1,9 @@
 {
   "datasetVersion": {
+    "license": {
+      "name": "CC0 1.0",
+      "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+    },
     "metadataBlocks": {
       "citation": {
         "fields": [
diff --git a/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh b/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh
index 0cfdd20c272..367aa214563 100755
--- a/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh
+++ b/scripts/tests/ec2-memory-benchmark/ec2-memory-benchmark-remote.sh
@@ -5,7 +5,7 @@ then
     EC2_HTTP_LOCATION="<EC2 INSTANCE HTTP ADDRESS>"
 fi
 
-DATAVERSE_APP_DIR=/usr/local/payara5/glassfish/domains/domain1/applications/dataverse; export DATAVERSE_APP_DIR
+DATAVERSE_APP_DIR=/usr/local/payara6/glassfish/domains/domain1/applications/dataverse; export DATAVERSE_APP_DIR
 
 # restart app server
 
diff --git a/scripts/vagrant/install-dataverse.sh b/scripts/vagrant/install-dataverse.sh
deleted file mode 100644
index c9873f7d3ec..00000000000
--- a/scripts/vagrant/install-dataverse.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env bash
-
-if [ ! -z "$1" ]; then
-  MAILSERVER=$1
-  MAILSERVER_ARG="--mailserver $MAILSERVER"
-fi
-WAR=/dataverse/target/dataverse*.war
-if [ ! -f $WAR ]; then
-  echo "no war file found... building"
-  #echo "Installing nss on CentOS 6 to avoid java.security.KeyException while building war file: https://github.com/IQSS/dataverse/issues/2744"
-  #yum install -y nss
-  su $SUDO_USER -s /bin/sh -c "cd /dataverse && source /etc/profile.d/maven.sh && mvn -q package"
-fi
-cd /dataverse/scripts/installer
-
-# move any pre-existing `default.config` file out of the way to avoid overwriting
-pid=$$
-if [ -e default.config ]; then
-	cp default.config tmp-${pid}-default.config
-fi
-
-# Switch to newer Python-based installer
-python3 ./install.py --noninteractive --config_file="default.config"
-
-if [ -e tmp-${pid}-default.config ]; then # if we moved it out, move it back
-	mv -f tmp-${pid}-default.config default.config
-fi
-
-echo "If "vagrant up" was successful (check output above) Dataverse is running on port 8080 of the Linux machine running within Vagrant, but this port has been forwarded to port 8088 of the computer you ran "vagrant up" on. For this reason you should go to http://localhost:8088 to see the Dataverse app running."
-
-echo "Please also note that the installation script has now started Payara, but has not set up an autostart mechanism for it.\nTherefore, the next time this VM is started, Payara must be started manually.\nSee https://guides.dataverse.org/en/latest/installation/prerequisites.html#launching-payara-on-system-boot for details."
diff --git a/scripts/vagrant/rpmbuild.sh b/scripts/vagrant/rpmbuild.sh
deleted file mode 100755
index f10830afb5b..00000000000
--- a/scripts/vagrant/rpmbuild.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-rpm -Uvh http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-7.noarch.rpm
-yum install -y rpm-build httpd-devel libapreq2-devel R-devel
diff --git a/scripts/vagrant/setup-counter-processor.sh b/scripts/vagrant/setup-counter-processor.sh
deleted file mode 100755
index a418e8d6251..00000000000
--- a/scripts/vagrant/setup-counter-processor.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-echo "Setting up counter-processor"
-echo "Installing dependencies"
-yum -y install unzip vim-enhanced
-yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
-# EPEL provides Python 3.6.6, new enough (3.6.4 in .python-version)
-yum -y install python36 jq
-# "ensurepip" tip from https://stackoverflow.com/questions/50408941/recommended-way-to-install-pip3-on-centos7/52518512#52518512
-python3.6 -m ensurepip
-# FIXME: actually use this dedicated "counter" user.
-COUNTER_USER=counter
-echo "Ensuring Unix user '$COUNTER_USER' exists"
-useradd $COUNTER_USER || :
-COMMIT='7974dad259465ba196ef639f48dea007cae8f9ac'
-UNZIPPED_DIR="counter-processor-$COMMIT"
-if [ ! -e $UNZIPPED_DIR ]; then
-  ZIP_FILE="${COMMIT}.zip"
-  echo "Downloading and unzipping $ZIP_FILE"
-  wget https://github.com/CDLUC3/counter-processor/archive/$ZIP_FILE
-  unzip $ZIP_FILE
-fi
-cd $UNZIPPED_DIR
-echo "Installation of the GeoLite2 country database for counter-processor can no longer be automated. See the Installation Guide for the manual installation process."
-pip3 install -r requirements.txt
-# For now, parsing sample_logs/counter_2018-05-08.log
-for i in `echo {00..31}`; do
-  # avoid errors like: No such file or directory: 'sample_logs/counter_2018-05-01.log'
-  touch sample_logs/counter_2018-05-$i.log
-done
-#LOG_GLOB="sample_logs/counter_2018-05-*.log"
-#START_DATE="2018-05-08"
-#END_DATE="2018-05-09"
-CONFIG_FILE=/dataverse/scripts/vagrant/counter-processor-config.yaml python3.6 main.py
diff --git a/scripts/vagrant/setup-solr.sh b/scripts/vagrant/setup-solr.sh
deleted file mode 100755
index 70d3fc632a7..00000000000
--- a/scripts/vagrant/setup-solr.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-echo "Setting up Solr"
-dnf install -qy lsof
-SOLR_USER=solr
-SOLR_HOME=/usr/local/solr
-mkdir $SOLR_HOME
-chown $SOLR_USER:$SOLR_USER $SOLR_HOME
-su $SOLR_USER -s /bin/sh -c "cp /dataverse/downloads/solr-8.11.1.tgz $SOLR_HOME"
-su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME && tar xfz solr-8.11.1.tgz"
-su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1/server/solr && cp -r configsets/_default . && mv _default collection1"
-su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/schema*.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/"
-su $SOLR_USER -s /bin/sh -c "cp /dataverse/conf/solr/8.11.1/solrconfig.xml $SOLR_HOME/solr-8.11.1/server/solr/collection1/conf/solrconfig.xml"
-su $SOLR_USER -s /bin/sh -c "cd $SOLR_HOME/solr-8.11.1 && bin/solr start && bin/solr create_core -c collection1 -d server/solr/collection1/conf/"
-cp /dataverse/doc/sphinx-guides/source/_static/installation/files/etc/init.d/solr /etc/init.d/solr
-chmod 755 /etc/init.d/solr
-/etc/init.d/solr stop
-/etc/init.d/solr start
-chkconfig solr on
diff --git a/scripts/vagrant/setup.sh b/scripts/vagrant/setup.sh
deleted file mode 100644
index 0af4afb22af..00000000000
--- a/scripts/vagrant/setup.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/bin/bash
-echo "Installing dependencies for Dataverse"
-
-# wget seems to be missing in box 'bento/centos-8.2'
-dnf install -qy wget
-
-# python3 and psycopg2 for the Dataverse installer
-dnf install -qy python3 python3-psycopg2
-
-# JQ
-echo "Installing jq for the setup scripts"
-dnf install -qy epel-release
-dnf install -qy jq
-
-echo "Adding Shibboleth yum repo"
-cp /dataverse/conf/vagrant/etc/yum.repos.d/shibboleth.repo /etc/yum.repos.d
-# Uncomment this (and other shib stuff below) if you want
-# to use Vagrant (and maybe PageKite) to test Shibboleth.
-#yum install -y shibboleth shibboleth-embedded-ds
-
-# java configuration et alia
-dnf install -qy java-11-openjdk-devel httpd mod_ssl unzip
-alternatives --set java /usr/lib/jvm/jre-11-openjdk/bin/java
-java -version
-
-# maven included in centos8 requires 1.8.0 - download binary instead
-wget -q https://archive.apache.org/dist/maven/maven-3/3.8.2/binaries/apache-maven-3.8.2-bin.tar.gz
-tar xfz apache-maven-3.8.2-bin.tar.gz
-mkdir /opt/maven
-mv apache-maven-3.8.2/* /opt/maven/
-echo "export JAVA_HOME=/usr/lib/jvm/jre-openjdk" > /etc/profile.d/maven.sh
-echo "export M2_HOME=/opt/maven" >> /etc/profile.d/maven.sh
-echo "export MAVEN_HOME=/opt/maven" >> /etc/profile.d/maven.sh
-echo "export PATH=/opt/maven/bin:${PATH}" >> /etc/profile.d/maven.sh
-chmod 0755 /etc/profile.d/maven.sh
-
-# disable centos8 postgresql module and install postgresql13-server
-dnf -qy module disable postgresql
-dnf install -qy https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm
-dnf install -qy postgresql13-server
-/usr/pgsql-13/bin/postgresql-13-setup initdb
-/usr/bin/systemctl stop postgresql-13
-cp /dataverse/conf/vagrant/var/lib/pgsql/data/pg_hba.conf /var/lib/pgsql/13/data/pg_hba.conf
-/usr/bin/systemctl start postgresql-13
-/usr/bin/systemctl enable postgresql-13
-
-PAYARA_USER=dataverse
-echo "Ensuring Unix user '$PAYARA_USER' exists"
-useradd $PAYARA_USER || :
-SOLR_USER=solr
-echo "Ensuring Unix user '$SOLR_USER' exists"
-useradd $SOLR_USER || :
-DOWNLOAD_DIR='/dataverse/downloads'
-PAYARA_ZIP="$DOWNLOAD_DIR/payara-5.2022.3.zip"
-SOLR_TGZ="$DOWNLOAD_DIR/solr-8.11.1.tgz"
-if [ ! -f $PAYARA_ZIP ] || [ ! -f $SOLR_TGZ ]; then
-    echo "Couldn't find $PAYARA_ZIP or $SOLR_TGZ! Running download script...."
-    cd $DOWNLOAD_DIR && ./download.sh && cd
-    echo "Done running download script."
-fi
-PAYARA_USER_HOME=~dataverse
-PAYARA_ROOT=/usr/local/payara5
-if [ ! -d $PAYARA_ROOT ]; then
-  echo "Copying $PAYARA_ZIP to $PAYARA_USER_HOME and unzipping"
-  su $PAYARA_USER -s /bin/sh -c "cp $PAYARA_ZIP $PAYARA_USER_HOME"
-  su $PAYARA_USER -s /bin/sh -c "cd $PAYARA_USER_HOME && unzip -q $PAYARA_ZIP"
-  # default.config defaults to /usr/local/payara5 so let's go with that
-  rsync -a $PAYARA_USER_HOME/payara5/ $PAYARA_ROOT/
-else
-  echo "$PAYARA_ROOT already exists"
-fi
-
-#service shibd start
-/usr/bin/systemctl stop httpd
-cp /dataverse/conf/httpd/conf.d/dataverse.conf /etc/httpd/conf.d/dataverse.conf
-mkdir -p /var/www/dataverse/error-documents
-cp /dataverse/conf/vagrant/var/www/dataverse/error-documents/503.html /var/www/dataverse/error-documents
-/usr/bin/systemctl start httpd
-#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /tmp/pdurbin.pagekite.me
-#cp -a /etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml.orig
-#cp -a /etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml.orig
-# need more attributes, such as sn, givenName, mail
-#cp /dataverse/conf/vagrant/etc/shibboleth/attribute-map.xml /etc/shibboleth/attribute-map.xml
-# FIXME: automate this?
-#curl 'https://www.testshib.org/cgi-bin/sp2config.cgi?dist=Others&hostname=pdurbin.pagekite.me' > /etc/shibboleth/shibboleth2.xml
-#cp /dataverse/conf/vagrant/etc/shibboleth/shibboleth2.xml /etc/shibboleth/shibboleth2.xml
-#service shibd restart
-#curl -k --sslv3 https://pdurbin.pagekite.me/Shibboleth.sso/Metadata > /downloads/pdurbin.pagekite.me
-#service httpd restart
-
-echo "#########################################################################################"
-echo "# This is a Vagrant test box, so we're disabling firewalld. 			      #
-echo "# Re-enable it with $ sudo systemctl enable firewalld && sudo systemctl start firewalld #"
-echo "#########################################################################################"
-systemctl disable firewalld
-systemctl stop firewalld
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
new file mode 100644
index 00000000000..ed670294873
--- /dev/null
+++ b/src/main/docker/Dockerfile
@@ -0,0 +1,61 @@
+# Copyright 2023 Forschungszentrum Jülich GmbH
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+################################################################################################################
+#
+# THIS FILE IS TO BE USED WITH MAVEN DOCKER BUILD:
+# mvn -Pct clean package
+#
+################################################################################################################
+#
+# Some commands used are inspired by https://github.com/payara/Payara/tree/master/appserver/extras/docker-images.
+# Most parts originate from older versions of https://github.com/gdcc/dataverse-kubernetes.
+#
+# We are not using upstream Payara images because:
+#  - Their image is less optimised for production usage and Dataverse by design choices
+#  - We provide multi-arch images
+#  - We provide some tweaks for development and monitoring
+#
+
+# Make the Java base image and version configurable (useful for trying newer Java versions and flavors)
+ARG BASE_IMAGE="gdcc/base:unstable"
+FROM $BASE_IMAGE
+
+# Make Payara use the "ct" profile for MicroProfile Config. Will switch various defaults for the application
+# setup in META-INF/microprofile-config.properties.
+# See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile
+ENV MP_CONFIG_PROFILE=ct
+
+# Workaround to configure upload directories by default to useful place until we can have variable lookups in
+# defaults for glassfish-web.xml and other places.
+ENV DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads"
+ENV DATAVERSE_FILES_DOCROOT="${STORAGE_DIR}/docroot"
+
+# Copy app and deps from assembly in proper layers
+COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/
+COPY --chown=payara:payara maven/app ${DEPLOY_DIR}/dataverse/
+COPY --chown=payara:payara maven/supplements ${DEPLOY_DIR}/dataverse/supplements/
+COPY --chown=payara:payara maven/scripts ${SCRIPT_DIR}/
+RUN chmod +x "${SCRIPT_DIR}"/*
+
+# Create symlinks for jHove
+RUN ln -s "${DEPLOY_DIR}/dataverse/supplements/jhove.conf" "${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}/config/jhove.conf" && \
+    ln -s "${DEPLOY_DIR}/dataverse/supplements/jhoveConfig.xsd" "${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}/config/jhoveConfig.xsd" && \
+    sed -i "${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}/config/jhove.conf" -e "s:/usr/local/payara./glassfish/domains/domain1:${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}:g"
+
+LABEL org.opencontainers.image.created="@git.build.time@" \
+      org.opencontainers.image.authors="Research Data Management at FZJ <forschungsdaten@fz-juelich.de>" \
+      org.opencontainers.image.url="https://guides.dataverse.org/en/latest/container/" \
+      org.opencontainers.image.documentation="https://guides.dataverse.org/en/latest/container/" \
+      org.opencontainers.image.source="https://github.com/IQSS/dataverse" \
+      org.opencontainers.image.version="@project.version@" \
+      org.opencontainers.image.revision="@git.commit.id.abbrev@" \
+      org.opencontainers.image.vendor="Global Dataverse Community Consortium" \
+      org.opencontainers.image.licenses="Apache-2.0" \
+      org.opencontainers.image.title="Dataverse Application Image" \
+      org.opencontainers.image.description="This container image provides the research data repository software Dataverse in a box." \
+      org.dataverse.deps.postgresql.version="@postgresql.server.version@" \
+      org.dataverse.deps.solr.version="@solr.version@"
\ No newline at end of file
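
The header comment in the Dockerfile above names the intended build path; as a usage sketch (run from the repository root; the resulting image name is an assumption based on the community registry described in the README below):

```bash
# Build the application image through the Maven "ct" profile, as stated in the
# Dockerfile header above. Requires Docker and Maven on the build machine.
mvn -Pct clean package
# Sanity check that an image was produced (the "dataverse" name is an assumption):
docker image ls | grep -i dataverse
```
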
diff --git a/src/main/docker/README.md b/src/main/docker/README.md
new file mode 100644
index 00000000000..06e2769ed6e
--- /dev/null
+++ b/src/main/docker/README.md
@@ -0,0 +1,62 @@
+# Dataverse Application Container Image
+
+The "application image" offers you a deployment-ready Dataverse application running on the underlying
+application server, which is provided by the [base image](https://hub.docker.com/r/gdcc/base). 
+Its sole purpose is to bundle the application and any additional material necessary to successfully jumpstart
+the application.
+
+Note: Until all JVM options are *MicroProfile Config* enabled, it also adds the necessary scripting glue to
+configure the application's domain while booting the application server. See the tunables described in the Application Image guide.
+
+## Quick Reference
+
+**Maintained by:**
+
+This image is created, maintained and supported by the Dataverse community on a best-effort basis.
+
+**Where to find documentation:**
+
+The [Dataverse Container Guide - Application Image](https://guides.dataverse.org/en/latest/container/app-image.html)
+provides in-depth information about content, building, tuning and so on for this image. You should also consult
+the [Dataverse Container Guide - Base Image](https://guides.dataverse.org/en/latest/container/base-image.html) page
+for more details on tunable settings, locations, etc.
+
+**Where to get help and ask questions:**
+
+IQSS will not offer support on how to deploy or run it. Please reach out to the community for help on using it.
+You can join the Community Chat on Matrix at https://chat.dataverse.org and https://groups.google.com/g/dataverse-community
+to ask for help and guidance.
+
+## Supported Image Tags
+
+This image is sourced within the main upstream code [repository of the Dataverse software](https://github.com/IQSS/dataverse).
+Development and maintenance of the [image's code](https://github.com/IQSS/dataverse/tree/develop/src/main/docker)
+happens there (again, by the community). Community-supported image tags are based on the two most important branches:
+
+- The `unstable` tag corresponds to the `develop` branch, where pull requests are merged.
+  ([`Dockerfile`](https://github.com/IQSS/dataverse/tree/develop/src/main/docker/Dockerfile))
+- The `alpha` tag corresponds to the `master` branch, where releases are cut from.
+  ([`Dockerfile`](https://github.com/IQSS/dataverse/tree/master/src/main/docker/Dockerfile))
+
+Within the main repository, you may find the application image files at `<git root>/src/main/docker`.
+This Maven module uses the [Maven Docker Plugin](https://dmp.fabric8.io) to build and ship the image.
+You may use, extend, or alter this image to your liking and/or host in some different registry if you want to.
+
+**Supported architectures:** This image is created as a "multi-arch image", supporting the most common architectures
+Dataverse usually runs on: AMD64 (Windows/Linux/...) and ARM64 (Apple M1/M2).
+
+## License
+
+Image content created by the community is licensed under [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0),
+like the [main Dataverse project](https://github.com/IQSS/dataverse/blob/develop/LICENSE.md).
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and limitations under the License.
+
+As with all Docker images, all images likely also contain other software which may be under other licenses (such as
+[Payara Server](https://github.com/payara/Payara/blob/master/LICENSE.txt), Bash, etc., from the base
+distribution, along with any direct or indirect (Java) dependencies contained).
+
+As for any pre-built image usage, it is the image user's responsibility to ensure that any use of this image complies
+with any relevant licenses for all software contained within.
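
As a minimal usage sketch for the community-supported tags listed above (the `gdcc/dataverse` image name is an assumption, not stated in this README; a working deployment additionally needs PostgreSQL, Solr, and configuration as described in the Container Guide):

```bash
# Pull a community tag and inspect the OCI labels baked in by the Dockerfile above.
# The "gdcc/dataverse" image name is an assumption.
docker pull gdcc/dataverse:unstable
docker inspect --format '{{json .Config.Labels}}' gdcc/dataverse:unstable
```
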
diff --git a/src/main/docker/assembly.xml b/src/main/docker/assembly.xml
new file mode 100644
index 00000000000..9f9b39617a3
--- /dev/null
+++ b/src/main/docker/assembly.xml
@@ -0,0 +1,28 @@
+<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">
+    <fileSets>
+        <!-- Get our app, but exclude deps -->
+        <fileSet>
+            <directory>target/${project.artifactId}-${project.version}</directory>
+            <outputDirectory>app</outputDirectory>
+            <excludes>
+                <exclude>WEB-INF/lib/**/*</exclude>
+            </excludes>
+        </fileSet>
+        <!-- Get our dependencies in a separate folder (image layer cache!) -->
+        <fileSet>
+            <directory>target/${project.artifactId}-${project.version}/WEB-INF/lib</directory>
+            <outputDirectory>deps</outputDirectory>
+        </fileSet>
+        <!-- Supplemental data (configs, metadata, ...) -->
+        <fileSet>
+            <directory>conf/jhove</directory>
+            <outputDirectory>supplements</outputDirectory>
+        </fileSet>
+        <!-- Init scripts and usage scripts (bootstrapping, configuration, ...) -->
+        <fileSet>
+            <directory>src/main/docker/scripts</directory>
+            <outputDirectory>scripts</outputDirectory>
+        </fileSet>
+    </fileSets>
+</assembly>
\ No newline at end of file
diff --git a/src/main/docker/scripts/init_2_configure.sh b/src/main/docker/scripts/init_2_configure.sh
new file mode 100755
index 00000000000..a98f08088c1
--- /dev/null
+++ b/src/main/docker/scripts/init_2_configure.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+################################################################################
+# Configure Payara
+#
+# BEWARE: As this is done for Kubernetes, we will ALWAYS start with a fresh container!
+#         When moving to Payara 5+, the option commands are idempotent.
+#         The resources are to be created by the application on deployment,
+#         once Dataverse has been properly refactored, etc.
+################################################################################
+
+# Fail on any error
+set -euo pipefail
+
+# Include some sane defaults (which are currently not settable via MicroProfile Config).
+# This is an ugly hack and shall be removed once #7000 is resolved.
+export dataverse_auth_password__reset__timeout__in__minutes="${dataverse_auth_password__reset__timeout__in__minutes:-60}"
+export dataverse_timerServer="${dataverse_timerServer:-true}"
+export dataverse_files_storage__driver__id="${dataverse_files_storage__driver__id:-local}"
+if [ "${dataverse_files_storage__driver__id}" = "local" ]; then
+  export dataverse_files_local_type="${dataverse_files_local_type:-file}"
+  export dataverse_files_local_label="${dataverse_files_local_label:-Local}"
+  export dataverse_files_local_directory="${dataverse_files_local_directory:-${STORAGE_DIR}/store}"
+fi
+
+# 0. Define postboot commands file to be read by Payara and clear it
+DV_POSTBOOT=${PAYARA_DIR}/dataverse_postboot
+echo "# Dataverse postboot configuration for Payara" > "${DV_POSTBOOT}"
+
+# 2. Domain-spaced resources (JDBC, JMS, ...)
+# TODO: This is ugly and dirty. It should be replaced with resources from
+#       EE 8 code annotations or at least glassfish-resources.xml
+# NOTE: the postboot commands file is not multi-line capable, thus spaghetti is needed.
+
+# JavaMail
+echo "INFO: Defining JavaMail."
+echo "create-javamail-resource --mailhost=${DATAVERSE_MAIL_HOST:-smtp} --mailuser=${DATAVERSE_MAIL_USER:-dataversenotify} --fromaddress=${DATAVERSE_MAIL_FROM:-dataverse@localhost} mail/notifyMailSession" >> "${DV_POSTBOOT}"
+
+# 3. Domain based configuration options
+# Set Dataverse environment variables
+echo "INFO: Defining system properties for Dataverse configuration options."
+#env | grep -Ee "^(dataverse|doi)_" | sort -fd
+env -0 | grep -z -Ee "^(dataverse|doi)_" | while IFS='=' read -r -d '' k v; do
+    # transform __ to -
+    # shellcheck disable=SC2001
+    KEY=$(echo "${k}" | sed -e "s#__#-#g")
+    # transform remaining single _ to .
+    KEY=$(echo "${KEY}" | tr '_' '.')
+
+    # escape colons in values
+    # shellcheck disable=SC2001
+    v=$(echo "${v}" | sed -e 's/:/\\\:/g')
+
+    echo "DEBUG: Handling ${KEY}=${v}."
+    echo "create-system-properties ${KEY}=${v}" >> "${DV_POSTBOOT}"
+done
+
+# 4. Add the commands to the existing postboot file, but insert BEFORE deployment
+TMPFILE=$(mktemp)
+cat "${DV_POSTBOOT}" "${POSTBOOT_COMMANDS}" > "${TMPFILE}" && mv "${TMPFILE}" "${POSTBOOT_COMMANDS}"
+echo "DEBUG: postboot contains the following commands:"
+echo "--------------------------------------------------"
+cat "${POSTBOOT_COMMANDS}"
+echo "--------------------------------------------------"
+
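
To make the name mapping in the loop above concrete, here is a standalone sketch using a variable already exported earlier in the script (double underscores become dashes, remaining underscores become dots, colons in values are escaped):

```bash
# Standalone sketch of the env-var-to-system-property mapping performed above.
k="dataverse_files_storage__driver__id"
v="local"
KEY=$(echo "${k}" | sed -e 's#__#-#g' | tr '_' '.')
V=$(echo "${v}" | sed -e 's/:/\\:/g')
echo "create-system-properties ${KEY}=${V}"
# prints: create-system-properties dataverse.files.storage-driver-id=local
```
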
diff --git a/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh b/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
new file mode 100644
index 00000000000..c234ad33307
--- /dev/null
+++ b/src/main/docker/scripts/init_3_wait_dataverse_db_host.sh
@@ -0,0 +1,4 @@
+# It was reported in #9949 that on the first launch of the containers Dataverse would not be deployed on Payara.
+# This was caused by a race condition due to Postgres not being ready. A solution for Docker Compose was prepared,
+# but it didn't work due to a compatibility issue in the Maven plugin [https://github.com/fabric8io/docker-maven-plugin/issues/888].
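+# Block until the PostgreSQL host:port accepts connections (or give up after 120 seconds),
+# so Payara only deploys Dataverse once the database is reachable.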
+wait-for "${DATAVERSE_DB_HOST:-postgres}:${DATAVERSE_DB_PORT:-5432}" -t 120
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
index f6cbd01ece0..f1bfc3e290b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AbstractGlobalIdServiceBean.java
@@ -3,11 +3,13 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.InputStream;
-
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+
+import org.apache.commons.lang3.RandomStringUtils;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.jsoup.nodes.Element;
@@ -17,27 +19,21 @@ public abstract class AbstractGlobalIdServiceBean implements GlobalIdServiceBean
 
     private static final Logger logger = Logger.getLogger(AbstractGlobalIdServiceBean.class.getCanonicalName());
 
-    @EJB
+    @Inject
     DataverseServiceBean dataverseService;
     @EJB
+    protected
     SettingsServiceBean settingsService;
-    @EJB
-    EjbDataverseEngine commandEngine;
-    @EJB
-    DatasetServiceBean datasetService;
-    @EJB
-    DataFileServiceBean datafileService;
-    @EJB
+    @Inject
+    protected
+    DvObjectServiceBean dvObjectService;
+    @Inject
     SystemConfig systemConfig;
+
+    protected Boolean configured = null;
     
     public static String UNAVAILABLE = ":unav";
 
-    @Override
-    public String getIdentifierForLookup(String protocol, String authority, String identifier) {
-        logger.log(Level.FINE,"getIdentifierForLookup");
-        return protocol + ":" + authority + "/" + identifier;
-    }
-
     @Override
     public Map<String, String> getMetadataForCreateIndicator(DvObject dvObjectIn) {
         logger.log(Level.FINE,"getMetadataForCreateIndicator(DvObject)");
@@ -101,14 +97,10 @@ protected String getTargetUrl(DvObject dvObjectIn) {
     
     @Override
     public String getIdentifier(DvObject dvObject) {
-        return dvObject.getGlobalId().asString();
+        GlobalId gid = dvObject.getGlobalId();
+        return gid != null ? gid.asString() : null;
     }
 
-    protected String getTargetUrl(Dataset datasetIn) {
-        logger.log(Level.FINE,"getTargetUrl");
-        return systemConfig.getDataverseSiteUrl() + Dataset.TARGET_URL + datasetIn.getGlobalIdString();
-    }
-    
     protected String generateYear (DvObject dvObjectIn){
         return dvObjectIn.getYearPublishedCreated(); 
     }
@@ -120,16 +112,41 @@ public Map<String, String> getMetadataForTargetURL(DvObject dvObject) {
         return metadata;
     }
     
+    @Override
+    public boolean alreadyRegistered(DvObject dvo) throws Exception {
+        if(dvo==null) {
+            logger.severe("Null DvObject sent to alreadyRegistered().");
+            return false;
+        }
+        GlobalId globalId = dvo.getGlobalId();
+        if(globalId == null) {
+            return false;
+        }
+        return alreadyRegistered(globalId, false);
+    }
+
+    public abstract boolean alreadyRegistered(GlobalId globalId, boolean noProviderDefault) throws Exception;
+
+    /*
+     * ToDo: the DvObject being sent in provides partial support for the case where
+     * it has a different authority/protocol than what is configured (i.e. a legacy
+     * Pid that can actually be updated by the Pid account being used.) Removing
+     * this now would potentially break/make it harder to handle that case prior to
+     * support for configuring multiple Pid providers. Once that exists, it would be
+     * cleaner to always find the PidProvider associated with the
+     * protocol/authority/shoulder of the current dataset and then not pass the
+     * DvObject as a param. (This would also remove calls to get the settings since
+     * that would be done at construction.)
+     */
     @Override
     public DvObject generateIdentifier(DvObject dvObject) {
 
         String protocol = dvObject.getProtocol() == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Protocol) : dvObject.getProtocol();
         String authority = dvObject.getAuthority() == null ? settingsService.getValueForKey(SettingsServiceBean.Key.Authority) : dvObject.getAuthority();
-        GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(protocol, commandEngine.getContext());
         if (dvObject.isInstanceofDataset()) {
-            dvObject.setIdentifier(datasetService.generateDatasetIdentifier((Dataset) dvObject, idServiceBean));
+            dvObject.setIdentifier(generateDatasetIdentifier((Dataset) dvObject));
         } else {
-            dvObject.setIdentifier(datafileService.generateDataFileIdentifier((DataFile) dvObject, idServiceBean));
+            dvObject.setIdentifier(generateDataFileIdentifier((DataFile) dvObject));
         }
         if (dvObject.getProtocol() == null) {
             dvObject.setProtocol(protocol);
@@ -140,6 +157,232 @@ public DvObject generateIdentifier(DvObject dvObject) {
         return dvObject;
     }
     
+    //ToDo just send the DvObject.DType
+    public String generateDatasetIdentifier(Dataset dataset) {
+        //ToDo - track these in the bean
+        String identifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString");
+        String shoulder = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, "");
+
+        switch (identifierType) {
+            case "randomString":
+                return generateIdentifierAsRandomString(dataset, shoulder);
+            case "storedProcGenerated":
+                return generateIdentifierFromStoredProcedureIndependent(dataset, shoulder);
+            default:
+                /* Should we throw an exception instead?? -- L.A. 4.6.2 */
+                return generateIdentifierAsRandomString(dataset, shoulder);
+        }
+    }
+
+
+    /**
+     * Check that an identifier entered by the user is unique (not currently used
+     * for any other study in this Dataverse Network); also check for a duplicate
+     * with the PID service if needed.
+     * @param globalId the persistent identifier to check
+     * @return {@code true} if the identifier is unique, {@code false} otherwise.
+     */
+    public boolean isGlobalIdUnique(GlobalId globalId) {
+        if ( ! dvObjectService.isGlobalIdLocallyUnique(globalId)  ) {
+            return false; // duplication found in local database
+        }
+
+        // not in local DB, look in the persistent identifier service
+        try {
+            return ! alreadyRegistered(globalId, false);
+        } catch (Exception e){
+            //we can live with failure - means identifier not found remotely
+        }
+
+        return true;
+    }
+
+    /** 
+     *   Parse a Persistent Id and set the protocol, authority, and identifier
+     * 
+     *   Example 1: doi:10.5072/FK2/BYM3IW
+     *       protocol: doi
+     *       authority: 10.5072
+     *       identifier: FK2/BYM3IW
+     * 
+     *   Example 2: hdl:1902.1/111012
+     *       protocol: hdl
+     *       authority: 1902.1
+     *       identifier: 111012
+     *
+     * @param fullIdentifierString the full persistent identifier, e.g. "doi:10.5072/FK2/BYM3IW".
+     * @return a {@code GlobalId} holding the parsed protocol, authority, and identifier, or
+     *         {@code null} if parsing failed.
+     */
+    @Override
+    public GlobalId parsePersistentId(String fullIdentifierString) {
+        if(!isConfigured()) {
+            return null;
+        }
+        // Occasionally, the protocol separator character ':' comes in still
+        // URL-encoded as %3A (usually as a result of the URL having been 
+        // encoded twice):
+        fullIdentifierString = fullIdentifierString.replace("%3A", ":");
+        
+        int index1 = fullIdentifierString.indexOf(':');
+        if (index1 > 0) { // ':' found with one or more characters before it
+            String protocol = fullIdentifierString.substring(0, index1);
+            GlobalId globalId = parsePersistentId(protocol, fullIdentifierString.substring(index1+1));
+            return globalId;
+        }
+        logger.log(Level.INFO, "Error parsing identifier: {0}: ''<protocol>:'' not found in string", fullIdentifierString);
+        return null;
+    }
+
+    protected GlobalId parsePersistentId(String protocol, String identifierString) {
+        if(!isConfigured()) {
+            return null;
+        }
+        String authority;
+        String identifier;
+        if (identifierString == null) {
+            return null;
+        }
+        int index = identifierString.indexOf('/');
+        if (index > 0 && (index + 1) < identifierString.length()) {
+            // '/' found with one or more characters
+            // before and after it
+            // Strip any whitespace, ; and ' from authority (should finding them cause a
+            // failure instead?)
+            authority = GlobalIdServiceBean.formatIdentifierString(identifierString.substring(0, index));
+            if (GlobalIdServiceBean.testforNullTerminator(authority)) {
+                return null;
+            }
+            identifier = GlobalIdServiceBean.formatIdentifierString(identifierString.substring(index + 1));
+            if (GlobalIdServiceBean.testforNullTerminator(identifier)) {
+                return null;
+            }
+        } else {
+            logger.log(Level.INFO, "Error parsing identifier: {0}: '':<authority>/<identifier>'' not found in string",
+                    identifierString);
+            return null;
+        }
+        return parsePersistentId(protocol, authority, identifier);
+    }
+    
+    public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
+        if(!isConfigured()) {
+            return null;
+        }
+        logger.fine("Parsing: " + protocol + ":" + authority + getSeparator() + identifier + " in " + getProviderInformation().get(0));
+        if(!GlobalIdServiceBean.isValidGlobalId(protocol, authority, identifier)) {
+            return null;
+        }
+        return new GlobalId(protocol, authority, identifier, getSeparator(), getUrlPrefix(),
+                getProviderInformation().get(0));
+    }
+
+    
+    public String getSeparator() {
+        //The standard default
+        return "/";
+    }
+
+    @Override
+    public String generateDataFileIdentifier(DataFile datafile) {
+        String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString");
+        String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, SystemConfig.DataFilePIDFormat.DEPENDENT.toString());
+        
+        String prepend = "";
+        if (doiDataFileFormat.equals(SystemConfig.DataFilePIDFormat.DEPENDENT.toString())){
+            //If format is dependent then pre-pend the dataset identifier 
+            prepend = datafile.getOwner().getIdentifier() + "/";
+            datafile.setProtocol(datafile.getOwner().getProtocol());
+            datafile.setAuthority(datafile.getOwner().getAuthority());
+        } else {
+            //If there's a shoulder prepend independent identifiers with it
+            prepend = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, "");
+            datafile.setProtocol(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol));
+            datafile.setAuthority(settingsService.getValueForKey(SettingsServiceBean.Key.Authority));
+        }
+ 
+        switch (doiIdentifierType) {
+            case "randomString":
+                return generateIdentifierAsRandomString(datafile, prepend);
+            case "storedProcGenerated":
+                if (doiDataFileFormat.equals(SystemConfig.DataFilePIDFormat.INDEPENDENT.toString())){ 
+                    return generateIdentifierFromStoredProcedureIndependent(datafile, prepend);
+                } else {
+                    return generateIdentifierFromStoredProcedureDependent(datafile, prepend);
+                }
+            default:
+                /* Should we throw an exception instead?? -- L.A. 4.6.2 */
+                return generateIdentifierAsRandomString(datafile, prepend);
+        }
+    }
+    
+
+    /*
+     * This method checks locally for a DvObject with the same PID and if that is OK, checks with the PID service.
+     * @param dvo - the object to check (ToDo - get protocol/authority from this PidProvider object)
+     * @param prepend - for Datasets, this is always the shoulder, for DataFiles, it could be the shoulder or the parent Dataset identifier
+     */
+    private String generateIdentifierAsRandomString(DvObject dvo, String prepend) {
+        String identifier = null;
+        do {
+            identifier = prepend + RandomStringUtils.randomAlphanumeric(6).toUpperCase();
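+            // e.g. with shoulder "FK2/" as the prepend, this yields identifiers like "FK2/ABC123"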
+        } while (!isGlobalIdUnique(new GlobalId(dvo.getProtocol(), dvo.getAuthority(), identifier, this.getSeparator(), this.getUrlPrefix(), this.getProviderInformation().get(0))));
+
+        return identifier;
+    }
+
+    /*
+     * This method checks locally for a DvObject with the same PID and if that is OK, checks with the PID service.
+     * @param dvo - the object to check (ToDo - get protocol/authority from this PidProvider object)
+     * @param prepend - for Datasets, this is always the shoulder, for DataFiles, it could be the shoulder or the parent Dataset identifier
+     */
+
+    private String generateIdentifierFromStoredProcedureIndependent(DvObject dvo, String prepend) {
+        String identifier; 
+        do {
+            String identifierFromStoredProcedure = dvObjectService.generateNewIdentifierByStoredProcedure();
+            // some diagnostics here maybe - is it possible to determine that it's failing 
+            // because the stored procedure hasn't been created in the database?
+            if (identifierFromStoredProcedure == null) {
+                return null; 
+            }
+            identifier = prepend + identifierFromStoredProcedure;
+        } while (!isGlobalIdUnique(new GlobalId(dvo.getProtocol(), dvo.getAuthority(), identifier, this.getSeparator(), this.getUrlPrefix(), this.getProviderInformation().get(0))));
+        
+        return identifier;
+    }
+    
+    /*This method is only used for DataFiles with DEPENDENT Pids. It is not for Datasets
+     * 
+     */
+    private String generateIdentifierFromStoredProcedureDependent(DataFile datafile, String prepend) {
+        String identifier;
+        Long retVal;
+        retVal = Long.valueOf(0L);
+        // ToDo - replace loops with one lookup for largest entry? (the do loop runs ~n**2/2 calls). The check for existingIdentifiers means this is mostly a local loop now, versus involving db or PidProvider calls, but still...
+        
+        // This will catch identifiers already assigned in the current transaction (e.g.
+        // in FinalizeDatasetPublicationCommand) that haven't been committed to the db
+        // without having to make a call to the PIDProvider
+        Set<String> existingIdentifiers = new HashSet<String>();
+        List<DataFile> files = datafile.getOwner().getFiles();
+        for(DataFile f:files) {
+            existingIdentifiers.add(f.getIdentifier());
+        }
+        
+        do {
+            retVal++;
+            identifier = prepend + retVal.toString();
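+            // e.g. "FK2/BYM3IW/1", "FK2/BYM3IW/2", ... when the parent dataset identifier is the prepend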
+
+        } while (existingIdentifiers.contains(identifier) || !isGlobalIdUnique(new GlobalId(datafile.getProtocol(), datafile.getAuthority(), identifier, this.getSeparator(), this.getUrlPrefix(), this.getProviderInformation().get(0))));
+
+        return identifier;
+    }
+
+    
     class GlobalIdMetadataTemplate {
 
 
@@ -159,7 +402,6 @@ public GlobalIdMetadataTemplate(){
 
     private String xmlMetadata;
     private String identifier;
-    private String datasetIdentifier;
     private List<String> datafileIdentifiers;
     private List<String> creators;
     private String title;
@@ -245,7 +487,7 @@ public String generateXML(DvObject dvObject) {
             // Added to prevent a NullPointerException when trying to destroy datasets when using DataCite rather than EZID.
             publisherYearFinal = this.publisherYear;
         }
-        xmlMetadata = template.replace("${identifier}", this.identifier.trim())
+        xmlMetadata = template.replace("${identifier}", getIdentifier().trim())
                 .replace("${title}", this.title)
                 .replace("${publisher}", this.publisher)
                 .replace("${publisherYear}", publisherYearFinal)
@@ -371,10 +613,6 @@ public void setIdentifier(String identifier) {
         this.identifier = identifier;
     }
 
-    public void setDatasetIdentifier(String datasetIdentifier) {
-        this.datasetIdentifier = datasetIdentifier;
-    }
-
     public List<String> getCreators() {
         return creators;
     }
@@ -428,10 +666,6 @@ public String getMetadataFromDvObject(String identifier, Map<String, String> met
             DataFile df = (DataFile) dvObject;
             String fileDescription = df.getDescription();
             metadataTemplate.setDescription(fileDescription == null ? "" : fileDescription);
-            String datasetPid = df.getOwner().getGlobalId().asString();
-            metadataTemplate.setDatasetIdentifier(datasetPid);
-        } else {
-            metadataTemplate.setDatasetIdentifier("");
         }
 
         metadataTemplate.setContacts(dataset.getLatestVersion().getDatasetContacts());
@@ -448,5 +682,19 @@ public String getMetadataFromDvObject(String identifier, Map<String, String> met
         logger.log(Level.FINE, "XML to send to DataCite: {0}", xmlMetadata);
         return xmlMetadata;
     }
+
+    @Override
+    public boolean canManagePID() {
+        //The default expectation is that PID providers are configured to manage some set (i.e. based on protocol/authority/shoulder) of PIDs
+        return true;
+    }
     
+    @Override
+    public boolean isConfigured() {
+        if(configured==null) {
+            return false;
+        } else {
+            return configured.booleanValue();
+        }
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java
index 6fc7262925a..db3c6029a78 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AlternativePersistentIdentifier.java
@@ -3,14 +3,14 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java b/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java
index 4838847e400..16ff4d266d8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ApiTokenPage.java
@@ -5,14 +5,14 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.api.Util;
-import java.sql.Timestamp;
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  * @todo Rename this to ApiTokenFragment? The separate page is being taken out
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
index 344032ef5e3..d03ebbc6f7b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
@@ -4,16 +4,16 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.Serializable;
 import java.util.MissingResourceException;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedNativeQueries;
-import javax.persistence.NamedNativeQuery;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedNativeQueries;
+import jakarta.persistence.NamedNativeQuery;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
index 05f3e209632..363622ba3bf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -2,6 +2,7 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 
@@ -14,19 +15,19 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
-import javax.ws.rs.ClientErrorException;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.ServerErrorException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
+import jakarta.ws.rs.ClientErrorException;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.ServerErrorException;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 import org.apache.tika.Tika;
 
@@ -46,6 +47,8 @@ public class AuxiliaryFileServiceBean implements java.io.Serializable {
     @EJB
     private SystemConfig systemConfig;
     
+    @EJB
+    StorageUseServiceBean storageUseService; 
 
     public AuxiliaryFile find(Object pk) {
         return em.find(AuxiliaryFile.class, pk);
@@ -126,6 +129,13 @@ public AuxiliaryFile processAuxiliaryFile(InputStream fileInputStream, DataFile
                 }
                 dataFile.getAuxiliaryFiles().add(auxFile);
             }
+            // We've just added this file to storage; increment the StorageUse
+            // record if needed. 
+            if (auxFile.getFileSize() != null 
+                    && auxFile.getFileSize() > 0 
+                    && dataFile.getOwner() != null ) {
+                storageUseService.incrementStorageSizeRecursively(dataFile.getOwner().getId(), auxFile.getFileSize());
+            }
         } catch (IOException ioex) {
             logger.severe("IO Exception trying to save auxiliary file: " + ioex.getMessage());
             throw new InternalServerErrorException();
@@ -181,6 +191,7 @@ public void deleteAuxiliaryFile(DataFile dataFile, String formatTag, String form
         if (af == null) {
             throw new FileNotFoundException();
         }
+        Long auxFileSize = af.getFileSize();
         em.remove(af);
         StorageIO<?> storageIO;
         storageIO = dataFile.getStorageIO();
@@ -188,6 +199,14 @@ public void deleteAuxiliaryFile(DataFile dataFile, String formatTag, String form
         if (storageIO.isAuxObjectCached(auxExtension)) {
             storageIO.deleteAuxObject(auxExtension);
         }
+        // We've just deleted this file from storage; update the StorageUse
+        // record if needed. 
+        if (auxFileSize != null
+                && auxFileSize > 0
+                && dataFile.getOwner() != null) {
+            storageUseService.incrementStorageSizeRecursively(dataFile.getOwner().getId(), (0L - auxFileSize));
+        }
+        
     }
 
     public List<AuxiliaryFile> findAuxiliaryFiles(DataFile dataFile) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java
index 4f465168580..214e26965fa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessage.java
@@ -4,13 +4,13 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.Serializable;
 import java.util.Collection;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.OneToMany;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java
index 91b4128c545..0e757998d58 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageServiceBean.java
@@ -10,10 +10,10 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java b/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java
index dbae9a6dc27..ea2dd1b41fc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BannerMessageText.java
@@ -6,13 +6,13 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java b/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java
index 2b342b09610..68c8d49ad7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CitationServlet.java
@@ -5,14 +5,15 @@
  */
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.util.StringUtil;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+
 import java.io.IOException;
-import java.io.PrintWriter;
-import javax.ejb.EJB;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+
+import jakarta.ejb.EJB;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
@@ -21,7 +22,7 @@
 public class CitationServlet extends HttpServlet {
 
     @EJB
-    DatasetServiceBean datasetService;    
+    DvObjectServiceBean dvObjectService;
     
     /**
      * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
@@ -37,10 +38,14 @@ protected void processRequest(HttpServletRequest request, HttpServletResponse re
         
         String persistentId = request.getParameter("persistentId");
         if (persistentId != null) {
-            Dataset ds = datasetService.findByGlobalId(persistentId);        
-            if (ds != null) {
-                response.sendRedirect("dataset.xhtml?persistentId=" + persistentId);
-                return;        
+            DvObject dob = dvObjectService.findByGlobalId(PidUtil.parseAsGlobalID(persistentId));
+            if (dob != null) {
+                if (dob instanceof Dataset) {
+                    response.sendRedirect("dataset.xhtml?persistentId=" + persistentId);
+                } else if (dob instanceof DataFile) {
+                    response.sendRedirect("file.xhtml?persistentId=" + persistentId);
+                }
+                return;
             }
         }
         response.sendError(HttpServletResponse.SC_NOT_FOUND);
diff --git a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
index d51a73fd2dc..bf509c33995 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ConfigureFragmentBean.java
@@ -16,10 +16,10 @@
 
 import java.sql.Timestamp;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.util.Date;
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java
index 5d5d9597746..9542cfe3f71 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabAlternate.java
@@ -7,15 +7,15 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java
index 181d939f4a1..5dcce98a90f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValue.java
@@ -17,16 +17,16 @@
 import java.util.Objects;
 import java.util.logging.Logger;
 import java.util.MissingResourceException;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java
index 1d530e136ba..eadc13721b3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueConverter.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java
index 0e9501414d0..4255c3b2dbc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ControlledVocabularyValueServiceBean.java
@@ -6,11 +6,11 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.List;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
index 64723fff79a..2cb6f27c3e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestion.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 import java.io.Serializable;
 import java.util.List;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java
index 32af06014a7..f19ee3c3fc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionResponse.java
@@ -7,8 +7,8 @@
 
 import java.io.Serializable;
 import java.util.List;
-import javax.faces.model.SelectItem;
-import javax.persistence.*;
+import jakarta.faces.model.SelectItem;
+import jakarta.persistence.*;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java
index a5329c8b96d..f3a6b83b53f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomQuestionValue.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java b/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java
index 713d365ba0f..9dd524127d7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/CustomizationFilesServlet.java
@@ -14,13 +14,13 @@
 import java.io.PrintWriter;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.servlet.ServletException;
-import javax.servlet.annotation.WebServlet;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.annotation.WebServlet;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import org.apache.commons.io.IOUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
index 7ccd4adb78f..7c75b1a4da6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterCache.java
@@ -7,14 +7,14 @@
 
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Lob;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Lob;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
index 218e4c85474..9ecc4a3ecc9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteRegisterService.java
@@ -18,11 +18,13 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.apache.commons.text.StringEscapeUtils;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
@@ -53,7 +55,11 @@ public class DOIDataCiteRegisterService {
     
     private DataCiteRESTfullClient getClient() throws IOException {
         if (client == null) {
-            client = new DataCiteRESTfullClient(System.getProperty("doi.baseurlstring"), System.getProperty("doi.username"), System.getProperty("doi.password"));
+            client = new DataCiteRESTfullClient(
+                JvmSettings.DATACITE_MDS_API_URL.lookup(),
+                JvmSettings.DATACITE_USERNAME.lookup(),
+                JvmSettings.DATACITE_PASSWORD.lookup()
+            );
         }
         return client;
     }
@@ -546,7 +552,7 @@ private String generateRelatedIdentifiers(DvObject dvObject) {
 
                 datafileIdentifiers = new ArrayList<>();
                 for (DataFile dataFile : dataset.getFiles()) {
-                    if (!dataFile.getGlobalId().asString().isEmpty()) {
+                    if (dataFile.getGlobalId() != null) {
                         if (sb.toString().isEmpty()) {
                             sb.append("<relatedIdentifiers>");
                         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
index e7dd49a6926..48786b41824 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIDataCiteServiceBean.java
@@ -3,16 +3,17 @@
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.util.ArrayList;
 import java.util.Base64;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
 
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.apache.commons.httpclient.HttpException;
 import org.apache.commons.httpclient.HttpStatus;
 
@@ -22,7 +23,7 @@
  * @author luopc
  */
 @Stateless
-public class DOIDataCiteServiceBean extends AbstractGlobalIdServiceBean {
+public class DOIDataCiteServiceBean extends DOIServiceBean {
 
     private static final Logger logger = Logger.getLogger(DOIDataCiteServiceBean.class.getCanonicalName());
     
@@ -34,41 +35,30 @@ public class DOIDataCiteServiceBean extends AbstractGlobalIdServiceBean {
     @EJB
     DOIDataCiteRegisterService doiDataCiteRegisterService;
 
-    public DOIDataCiteServiceBean() {
-    }
-
     @Override
     public boolean registerWhenPublished() {
         return false;
     }
 
-    @Override
-    public boolean alreadyExists(DvObject dvObject) {
-        if(dvObject==null) {
-            logger.severe("Null DvObject sent to alreadyExists().");
-            return false;
-        }
-        return alreadyExists(dvObject.getGlobalId());
-    }
+
 
     @Override
-    public boolean alreadyExists(GlobalId pid) {
-        logger.log(Level.FINE,"alreadyExists");
+    public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) {
+        logger.log(Level.FINE,"alreadyRegistered");
         if(pid==null || pid.asString().isEmpty()) {
             logger.fine("No identifier sent.");
             return false;
         }
-        boolean alreadyExists;
+        boolean alreadyRegistered;
         String identifier = pid.asString();
         try{
-            alreadyExists = doiDataCiteRegisterService.testDOIExists(identifier); 
+            alreadyRegistered = doiDataCiteRegisterService.testDOIExists(identifier); 
         } catch (Exception e){
-            logger.log(Level.WARNING, "alreadyExists failed");
+            logger.log(Level.WARNING, "alreadyRegistered failed");
             return false;
         }
-        return  alreadyExists;
+        return  alreadyRegistered;
     }
-    
 
     @Override
     public String createIdentifier(DvObject dvObject) throws Exception {
@@ -90,10 +80,10 @@ public String createIdentifier(DvObject dvObject) throws Exception {
     }
 
     @Override
-    public HashMap getIdentifierMetadata(DvObject dvObject) {
+    public Map<String, String> getIdentifierMetadata(DvObject dvObject) {
         logger.log(Level.FINE,"getIdentifierMetadata");
         String identifier = getIdentifier(dvObject);
-        HashMap<String, String> metadata = new HashMap<>();
+        Map<String, String> metadata = new HashMap<>();
         try {
             metadata = doiDataCiteRegisterService.getMetadata(identifier);
         } catch (Exception e) {
@@ -103,29 +93,6 @@ public HashMap getIdentifierMetadata(DvObject dvObject) {
     }
     
 
-    /**
-     * Looks up the metadata for a Global Identifier
-     * @param protocol the identifier system, e.g. "doi"
-     * @param authority the namespace that the authority manages in the identifier system
-     * @param identifier the local identifier part
-     * @return a Map of metadata. It is empty when the lookup failed, e.g. when
-     * the identifier does not exist.
-     */
-    @Override
-    public HashMap<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String identifier) {
-        logger.log(Level.FINE,"lookupMetadataFromIdentifier");
-        String identifierOut = getIdentifierForLookup(protocol, authority, identifier);
-        HashMap<String, String> metadata = new HashMap<>();
-        try {
-            metadata = doiDataCiteRegisterService.getMetadata(identifierOut);
-        } catch (Exception e) {
-            logger.log(Level.WARNING, "None existing so we can use this identifier");
-            logger.log(Level.WARNING, "identifier: {0}", identifierOut);
-        }
-        return metadata;
-    }
-
-
     /**
      * Modifies the DOI metadata for a Dataset
      * @param dvObject the dvObject whose metadata needs to be modified
@@ -219,9 +186,9 @@ public void deleteIdentifier(DvObject dvObject) throws IOException, HttpExceptio
     private void deleteDraftIdentifier(DvObject dvObject) throws IOException {
     	
     	//ToDo - incorporate into DataCiteRESTfulClient
-        String baseUrl = systemConfig.getDataCiteRestApiUrlString();
-        String username = System.getProperty("doi.username");
-        String password = System.getProperty("doi.password");
+        String baseUrl = JvmSettings.DATACITE_REST_API_URL.lookup();
+        String username = JvmSettings.DATACITE_USERNAME.lookup();
+        String password = JvmSettings.DATACITE_PASSWORD.lookup();
         GlobalId doi = dvObject.getGlobalId();
         /**
          * Deletes the DOI from DataCite if it can. Returns 204 if PID was deleted
@@ -269,13 +236,13 @@ public boolean publicizeIdentifier(DvObject dvObject) {
     
     @Override
     public List<String> getProviderInformation(){
-        ArrayList <String> providerInfo = new ArrayList<>();
-        String providerName = "DataCite";
-        String providerLink = "http://status.datacite.org";
-        providerInfo.add(providerName);
-        providerInfo.add(providerLink);
-        return providerInfo;
+        return List.of("DataCite", "https://status.datacite.org");
     }
 
 
+
+    @Override
+    protected String getProviderKeyName() {
+        return "DataCite";
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
index d21caf32411..86b74b72f30 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIEZIdServiceBean.java
@@ -1,44 +1,57 @@
 package edu.harvard.iq.dataverse;
 
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.ucsb.nceas.ezid.EZIDException;
 import edu.ucsb.nceas.ezid.EZIDService;
-import edu.ucsb.nceas.ezid.EZIDServiceRequest;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
+
+import jakarta.ejb.Stateless;
 
 /**
  *
  * @author skraffmiller
  */
 @Stateless
-public class DOIEZIdServiceBean extends AbstractGlobalIdServiceBean {
-
+public class DOIEZIdServiceBean extends DOIServiceBean {
+    
+    private static final Logger logger = Logger.getLogger(DOIEZIdServiceBean.class.getCanonicalName());
+    
     EZIDService ezidService;
-    EZIDServiceRequest ezidServiceRequest;
-    String baseURLString = "https://ezid.cdlib.org";
-    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dvn.core.index.DOIEZIdServiceBean");
-
-    // get username and password from system properties
-    private String USERNAME = "";
-    private String PASSWORD = "";
-
+    
+    // This has a sane default in microprofile-config.properties
+    private final String baseUrl = JvmSettings.EZID_API_URL.lookup();
+    
     public DOIEZIdServiceBean() {
-        logger.log(Level.FINE,"Constructor");
-        baseURLString = System.getProperty("doi.baseurlstring");
-        ezidService = new EZIDService(baseURLString);
-        USERNAME = System.getProperty("doi.username");
-        PASSWORD = System.getProperty("doi.password");
-        logger.log(Level.FINE, "Using baseURLString {0}", baseURLString);
+        // Creating the service doesn't do any harm, just initializing some object data here.
+        // Makes sure we don't run into NPEs from the other methods, but will obviously fail if the
+        // login below does not work.
+        this.ezidService = new EZIDService(this.baseUrl);
+        
         try {
-            ezidService.login(USERNAME, PASSWORD);
+            // These have (obviously) no default; they are optional so that the provider itself stays optional.
+            String username = JvmSettings.EZID_USERNAME.lookupOptional().orElse(null);
+            String password = JvmSettings.EZID_PASSWORD.lookupOptional().orElse(null);
+            
+            if (username != null ^ password != null) {
+                logger.log(Level.WARNING, "You must give both username and password. Will not try to login.");
+            }
+            
+            if (username != null && password != null) {
+                this.ezidService.login(username, password);
+                this.configured = true;
+            }
         } catch (EZIDException e) {
-            logger.log(Level.WARNING, "login failed ");
+            // We only log warnings here, but the object still needs to be created:
+            // the stateless EJB container expects construction to succeed, and this bean
+            // is requested for any global id parsing.
+            logger.log(Level.WARNING, "Login failed to {0}", this.baseUrl);
             logger.log(Level.WARNING, "Exception String: {0}", e.toString());
-            logger.log(Level.WARNING, "localized message: {0}", e.getLocalizedMessage());
-            logger.log(Level.WARNING, "cause: ", e.getCause());
-            logger.log(Level.WARNING, "message {0}", e.getMessage());
+            logger.log(Level.WARNING, "Localized message: {0}", e.getLocalizedMessage());
+            logger.log(Level.WARNING, "Cause:", e.getCause());
+            logger.log(Level.WARNING, "Message {0}", e.getMessage());
+        // TODO: is this antipattern really necessary?
         } catch (Exception e) {
             logger.log(Level.SEVERE, "Other Error on ezidService.login(USERNAME, PASSWORD) - not EZIDException ", e.getMessage());
         }
@@ -50,19 +63,10 @@ public boolean registerWhenPublished() {
     }
 
     @Override
-    public boolean alreadyExists(DvObject dvObject) throws Exception {
-        if(dvObject==null) {
-            logger.severe("Null DvObject sent to alreadyExists().");
-            return false;
-        }
-        return alreadyExists(dvObject.getGlobalId());
-    }
-    
-    @Override
-    public boolean alreadyExists(GlobalId pid) throws Exception {
-        logger.log(Level.FINE,"alreadyExists");
+    public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) throws Exception {
+        logger.log(Level.FINE,"alreadyRegistered");
         try {
-            HashMap<String, String> result = ezidService.getMetadata(pid.asString());            
+            HashMap<String, String> result = ezidService.getMetadata(pid.asString());
             return result != null && !result.isEmpty();
             // TODO just check for HTTP status code 200/404, sadly the status code is swept under the carpet
         } catch (EZIDException e ){
@@ -74,7 +78,7 @@ public boolean alreadyExists(GlobalId pid) throws Exception {
             if (e.getLocalizedMessage().contains("no such identifier")){
                 return false;
             }
-            logger.log(Level.WARNING, "alreadyExists failed");
+            logger.log(Level.WARNING, "alreadyRegistered failed");
             logger.log(Level.WARNING, "getIdentifier(dvObject) {0}", pid.asString());
             logger.log(Level.WARNING, "String {0}", e.toString());
             logger.log(Level.WARNING, "localized message {0}", e.getLocalizedMessage());
@@ -102,32 +106,6 @@ public Map<String, String> getIdentifierMetadata(DvObject dvObject) {
         return metadata;
     }
 
-    /**
-     * Looks up the metadata for a Global Identifier
-     *
-     * @param protocol the identifier system, e.g. "doi"
-     * @param authority the namespace that the authority manages in the
-     * identifier system
-     * identifier part
-     * @param identifier the local identifier part
-     * @return a Map of metadata. It is empty when the lookup failed, e.g. when
-     * the identifier does not exist.
-     */
-    @Override
-    public HashMap<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String identifier) {
-        logger.log(Level.FINE,"lookupMetadataFromIdentifier");
-        String identifierOut = getIdentifierForLookup(protocol, authority, identifier);
-        HashMap<String, String> metadata = new HashMap<>();
-        try {
-            metadata = ezidService.getMetadata(identifierOut);
-        } catch (EZIDException e) {
-            logger.log(Level.FINE, "None existing so we can use this identifier");
-            logger.log(Level.FINE, "identifier: {0}", identifierOut);
-            return metadata;
-        }
-        return metadata;
-    }
-
     /**
      * Modifies the EZID metadata for a Dataset
      *
@@ -249,12 +227,7 @@ private boolean updateIdentifierStatus(DvObject dvObject, String statusIn) {
     
     @Override
     public List<String> getProviderInformation(){
-        ArrayList <String> providerInfo = new ArrayList<>();
-        String providerName = "EZID";
-        String providerLink = baseURLString;
-        providerInfo.add(providerName);
-        providerInfo.add(providerLink);
-        return providerInfo;
+        return List.of("EZID", this.baseUrl);
     }
 
     @Override
@@ -301,5 +274,10 @@ private <T> HashMap<T,T> asHashMap(Map<T,T> map) {
         return (map instanceof HashMap) ? (HashMap)map : new HashMap<>(map);
     }
 
+    @Override
+    protected String getProviderKeyName() {
+        return "EZID";
+    }
+
 }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DOIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DOIServiceBean.java
new file mode 100644
index 00000000000..0182c745cd0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/DOIServiceBean.java
@@ -0,0 +1,78 @@
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
+
+public abstract class DOIServiceBean extends AbstractGlobalIdServiceBean {
+
+    public static final String DOI_PROTOCOL = "doi";
+    public static final String DOI_RESOLVER_URL = "https://doi.org/";
+    public static final String HTTP_DOI_RESOLVER_URL = "http://doi.org/";
+    public static final String DXDOI_RESOLVER_URL = "https://dx.doi.org/";
+    public static final String HTTP_DXDOI_RESOLVER_URL = "http://dx.doi.org/";
+
+    public DOIServiceBean() {
+        super();
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String pidString) {
+        if (pidString.startsWith(DOI_RESOLVER_URL)) {
+            pidString = pidString.replace(DOI_RESOLVER_URL,
+                    (DOI_PROTOCOL + ":"));
+        } else if (pidString.startsWith(HTTP_DOI_RESOLVER_URL)) {
+            pidString = pidString.replace(HTTP_DOI_RESOLVER_URL,
+                    (DOI_PROTOCOL + ":"));
+        } else if (pidString.startsWith(DXDOI_RESOLVER_URL)) {
+            pidString = pidString.replace(DXDOI_RESOLVER_URL,
+                    (DOI_PROTOCOL + ":"));
+        }
+        return super.parsePersistentId(pidString);
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String protocol, String identifierString) {
+
+        if (!DOI_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        GlobalId globalId = super.parsePersistentId(protocol, identifierString);
+        if (globalId!=null && !GlobalIdServiceBean.checkDOIAuthority(globalId.getAuthority())) {
+            return null;
+        }
+        return globalId;
+    }
+    
+    @Override
+    public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
+
+        if (!DOI_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        return super.parsePersistentId(protocol, authority, identifier);
+    }
+
+    public String getUrlPrefix() {
+        return DOI_RESOLVER_URL;
+    }
+
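+    // Lazily determines whether this bean matches the configured DoiProvider setting;
+    // when no decision can be made here, it defers to the superclass check.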
+    @Override
+    public boolean isConfigured() {
+        if (configured == null) {
+            if (getProviderKeyName() == null) {
+                configured = false;
+            } else {
+                String doiProvider = settingsService.getValueForKey(Key.DoiProvider, "");
+                if (getProviderKeyName().equals(doiProvider)) {
+                    configured = true;
+                } else if (!doiProvider.isEmpty()) {
+                    configured = false;
+                }
+            }
+        }
+        return super.isConfigured();
+    }
+
+    protected String getProviderKeyName() {
+        return null;
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
index 99c7951c96e..c37c3f52bc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DashboardPage.java
@@ -5,23 +5,21 @@
  */
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
index abe3cc3e6d7..9b4b89db44f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataCitation.java
@@ -14,7 +14,6 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
@@ -27,7 +26,7 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
@@ -57,7 +56,7 @@ public class DataCitation {
     private String publisher;
     private boolean direct;
     private List<String> funders;
-    private String seriesTitle;
+    private List<String> seriesTitles;
     private String description;
     private List<String> datesOfCollection;
     private List<String> keywords;
@@ -135,7 +134,7 @@ private void getCommonValuesFrom(DatasetVersion dsv) {
 
         datesOfCollection = dsv.getDatesOfCollection();
         title = dsv.getTitle();
-        seriesTitle = dsv.getSeriesTitle();
+        seriesTitles = dsv.getSeriesTitles();
         keywords = dsv.getKeywords();
         languages = dsv.getLanguages();
         spatialCoverages = dsv.getSpatialCoverages();
@@ -207,7 +206,7 @@ public String toString(boolean html, boolean anonymized) {
 
         if (persistentId != null) {
         	// always show url format
-            citationList.add(formatURL(persistentId.toURL().toString(), persistentId.toURL().toString(), html)); 
+            citationList.add(formatURL(persistentId.asURL(), persistentId.asURL(), html)); 
         }
         citationList.add(formatString(publisher, html));
         citationList.add(version);
@@ -298,7 +297,7 @@ public void writeAsBibtexCitation(OutputStream os) throws IOException {
         out.write(persistentId.getIdentifier());
         out.write("},\r\n");
         out.write("url = {");
-        out.write(persistentId.toURL().toString());
+        out.write(persistentId.asURL());
         out.write("}\r\n");
         out.write("}\r\n");
         out.flush();
@@ -330,8 +329,10 @@ public void writeAsRISCitation(OutputStream os) throws IOException {
             out.write("TY  - DATA" + "\r\n");
             out.write("T1  - " + getTitle() + "\r\n");
         }
-        if (seriesTitle != null) {
-            out.write("T3  - " + seriesTitle + "\r\n");
+        if (seriesTitles != null) {
+            for (String seriesTitle : seriesTitles) {
+                out.write("T3  - " + seriesTitle + "\r\n");
+            }
         }
         /* Removing abstract/description per Request from G. King in #3759
         if(description!=null) {
@@ -387,7 +388,7 @@ public void writeAsRISCitation(OutputStream os) throws IOException {
         
         out.write("SE  - " + date + "\r\n");
 
-        out.write("UR  - " + persistentId.toURL().toString() + "\r\n");
+        out.write("UR  - " + persistentId.asURL() + "\r\n");
         out.write("PB  - " + publisher + "\r\n");
 
         // a DataFile citation also includes filename und UNF, if applicable:
@@ -505,12 +506,22 @@ private void createEndNoteXML(XMLStreamWriter xmlw) throws XMLStreamException {
         xmlw.writeCharacters(title);
         xmlw.writeEndElement(); // title
         }
-        
-        if (seriesTitle != null) {
-            xmlw.writeStartElement("tertiary-title");
-            xmlw.writeCharacters(seriesTitle);
+
+        /*
+        Note: checking only !seriesTitles.isEmpty() (without the preceding null check)
+        causes a NullPointerException in testToEndNoteString_withoutTitleAndAuthor. -SEK 3/31/23
+        */
+        if (seriesTitles != null && !seriesTitles.isEmpty() ) {
+            xmlw.writeStartElement("tertiary-titles");
+            for (String seriesTitle : seriesTitles){
+                xmlw.writeStartElement("tertiary-title");
+                xmlw.writeCharacters(seriesTitle);
+                xmlw.writeEndElement(); // tertiary-title
+            }
-            xmlw.writeEndElement(); // tertiary-title
+            xmlw.writeEndElement(); // tertiary-titles
         }
+        
         xmlw.writeEndElement(); // titles
 
         xmlw.writeStartElement("section");
@@ -584,7 +595,7 @@ private void createEndNoteXML(XMLStreamWriter xmlw) throws XMLStreamException {
         xmlw.writeStartElement("urls");
         xmlw.writeStartElement("related-urls");
         xmlw.writeStartElement("url");
-        xmlw.writeCharacters(getPersistentId().toURL().toString());
+        xmlw.writeCharacters(getPersistentId().asURL());
         xmlw.writeEndElement(); // url
         xmlw.writeEndElement(); // related-urls
         xmlw.writeEndElement(); // urls
@@ -781,18 +792,13 @@ private GlobalId getPIDFrom(DatasetVersion dsv, DvObject dv) {
                 || HarvestingClient.HARVEST_STYLE_ICPSR.equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())
                 || HarvestingClient.HARVEST_STYLE_DATAVERSE
                         .equals(dsv.getDataset().getHarvestedFrom().getHarvestStyle())) {
-                // creating a global id like this:
-                // persistentId = new GlobalId(dv.getGlobalId());
-                // you end up doing new GlobalId((New GlobalId(dv)).toString())
-                // - doing an extra formatting-and-parsing-again
-                // This achieves the same thing:
                 if(!isDirect()) {
                 if (!StringUtils.isEmpty(dsv.getDataset().getIdentifier())) {
-                    return new GlobalId(dsv.getDataset());
+                    return dsv.getDataset().getGlobalId();
                 }
                 } else {
                 if (!StringUtils.isEmpty(dv.getIdentifier())) {
-                    return new GlobalId(dv);
+                    return dv.getGlobalId();
                 }
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 5171e8d49f2..3d8086b142b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -5,12 +5,11 @@
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.gson.annotations.Expose;
-import com.google.gson.annotations.SerializedName;
 import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
+import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
 import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker;
 import edu.harvard.iq.dataverse.ingest.IngestReport;
 import edu.harvard.iq.dataverse.ingest.IngestRequest;
@@ -22,19 +21,18 @@
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Objects;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.Files;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.*;
-import javax.validation.constraints.Pattern;
-import org.hibernate.validator.constraints.NotBlank;
+import java.util.stream.Collectors;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.*;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.NotBlank;
 
 /**
  *
@@ -47,16 +45,16 @@
                 query = "SELECT o FROM DataFile o WHERE o.creator.id=:creatorId"),
         @NamedQuery(name = "DataFile.findByReleaseUserId",
                 query = "SELECT o FROM DataFile o WHERE o.releaseUser.id=:releaseUserId"),
-        @NamedQuery(name="DataFile.findDataFileByIdProtocolAuth", 
+        @NamedQuery(name="DataFile.findDataFileByIdProtocolAuth",
                 query="SELECT s FROM DataFile s WHERE s.identifier=:identifier AND s.protocol=:protocol AND s.authority=:authority"),
-        @NamedQuery(name="DataFile.findDataFileThatReplacedId", 
+        @NamedQuery(name="DataFile.findDataFileThatReplacedId",
                 query="SELECT s.id FROM DataFile s WHERE s.previousDataFileId=:identifier")
 })
 @Entity
 @Table(indexes = {@Index(columnList="ingeststatus")
-		, @Index(columnList="checksumvalue")
-		, @Index(columnList="contenttype")
-		, @Index(columnList="restricted")})
+        , @Index(columnList="checksumvalue")
+        , @Index(columnList="contenttype")
+        , @Index(columnList="restricted")})
 public class DataFile extends DvObject implements Comparable {
     private static final Logger logger = Logger.getLogger(DatasetPage.class.getCanonicalName());
     private static final long serialVersionUID = 1L;
@@ -73,7 +71,6 @@ public class DataFile extends DvObject implements Comparable {
     @Column( nullable = false )
     @Pattern(regexp = "^.*/.*$", message = "{contenttype.slash}")
     private String contentType;
-    
 
 //    @Expose    
 //    @SerializedName("storageIdentifier")
@@ -198,6 +195,28 @@ public String toString() {
     @OneToMany(mappedBy="dataFile", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
     private List<GuestbookResponse> guestbookResponses;
 
+    @OneToMany(mappedBy="dataFile",fetch = FetchType.LAZY,cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH})
+    private List<FileAccessRequest> fileAccessRequests;
+
+    @ManyToMany
+    @JoinTable(name = "fileaccessrequests",
+    joinColumns = @JoinColumn(name = "datafile_id"),
+    inverseJoinColumns = @JoinColumn(name = "authenticated_user_id"))
+    private List<AuthenticatedUser> fileAccessRequesters;
+
+    
+    public List<FileAccessRequest> getFileAccessRequests(){
+        return fileAccessRequests;
+    }
+    
+    public List<FileAccessRequest> getFileAccessRequests(FileAccessRequest.RequestState state){
+        return fileAccessRequests.stream().filter(far -> far.getState() == state).collect(Collectors.toList());
+    }
+
+    public void setFileAccessRequests(List<FileAccessRequest> fARs){
+        this.fileAccessRequests = fARs;
+    }
+    
     public List<GuestbookResponse> getGuestbookResponses() {
         return guestbookResponses;
     }
@@ -367,7 +386,17 @@ public JsonArrayBuilder getTagLabelsAsJsonArrayBuilder(){
     public void setTags(List<DataFileTag> dataFileTags) {
         this.dataFileTags = dataFileTags;
     }
-    
+
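+    // Attaches a tag with the given label unless the file already carries one with that label.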
+    public void addUniqueTagByLabel(String tagLabel) throws IllegalArgumentException {
+        if (tagExists(tagLabel)) {
+            return;
+        }
+        DataFileTag tag = new DataFileTag();
+        tag.setTypeByLabel(tagLabel);
+        tag.setDataFile(this);
+        addTag(tag);
+    }
+
     public void addTag(DataFileTag tag) {
         if (dataFileTags == null) {
             dataFileTags = new ArrayList<>();
@@ -416,7 +445,7 @@ public String getIngestReportMessage() {
                 return ingestReports.get(0).getReport();
             }
         }
-        return "Ingest failed. No further information is available.";
+        return BundleUtil.getStringFromBundle("file.ingestFailed");
     }
     
     public boolean isTabularData() {
@@ -611,7 +640,7 @@ public String getFriendlySize() {
             return BundleUtil.getStringFromBundle("file.sizeNotAvailable");
         }
     }
-
+    
     public boolean isRestricted() {
         return restricted;
     }
@@ -747,13 +776,6 @@ public String getUnf() {
         }
         return null; 
     }
-    
-
-    @ManyToMany
-    @JoinTable(name = "fileaccessrequests",
-    joinColumns = @JoinColumn(name = "datafile_id"),
-    inverseJoinColumns = @JoinColumn(name = "authenticated_user_id"))
-    private List<AuthenticatedUser> fileAccessRequesters;
 
     public List<AuthenticatedUser> getFileAccessRequesters() {
         return fileAccessRequesters;
@@ -762,7 +784,51 @@ public List<AuthenticatedUser> getFileAccessRequesters() {
     public void setFileAccessRequesters(List<AuthenticatedUser> fileAccessRequesters) {
         this.fileAccessRequesters = fileAccessRequesters;
     }
-    
+
+
+    public void addFileAccessRequest(FileAccessRequest request) {
+        if (this.fileAccessRequests == null) {
+            this.fileAccessRequests = new ArrayList<>();
+        }
+
+        this.fileAccessRequests.add(request);
+    }
+
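+    // Returns this assignee's access request that is still in the CREATED state, or null if none exists.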
+    public FileAccessRequest getAccessRequestForAssignee(RoleAssignee roleAssignee) {
+        if (this.fileAccessRequests == null) {
+            return null;
+        }
+
+        return this.fileAccessRequests.stream()
+                .filter(fileAccessRequest -> fileAccessRequest.getRequester().equals(roleAssignee) && fileAccessRequest.isStateCreated()).findFirst()
+                .orElse(null);
+    }
+
+    public boolean removeFileAccessRequest(FileAccessRequest request) {
+        if (this.fileAccessRequests == null) {
+            return false;
+        }
+
+        if (request != null) {
+            this.fileAccessRequests.remove(request);
+            return true;
+        }
+
+        return false;
+    }
+
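+    // True if the given assignee already has an access request in the CREATED state on this file.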
+    public boolean containsActiveFileAccessRequestFromUser(RoleAssignee roleAssignee) {
+        if (this.fileAccessRequests == null) {
+            return false;
+        }
+
+        Set<AuthenticatedUser> existingUsers = getFileAccessRequests(FileAccessRequest.RequestState.CREATED).stream()
+            .map(FileAccessRequest::getRequester)
+            .collect(Collectors.toSet());
+
+        return existingUsers.contains(roleAssignee);
+    }
+
     public boolean isHarvested() {
         
         Dataset ownerDataset = this.getOwner();
@@ -924,8 +990,6 @@ public String toJSON(){
     
     public JsonObject asGsonObject(boolean prettyPrint){
         
-        String overarchingKey = "data";
-        
         GsonBuilder builder;
         if (prettyPrint){  // Add pretty printing
             builder = new GsonBuilder().excludeFieldsWithoutExposeAnnotation().setPrettyPrinting();
@@ -956,7 +1020,7 @@ public JsonObject asGsonObject(boolean prettyPrint){
         // https://github.com/IQSS/dataverse/issues/761, https://github.com/IQSS/dataverse/issues/2110, https://github.com/IQSS/dataverse/issues/3191
         //
         datasetMap.put("title", thisFileMetadata.getDatasetVersion().getTitle());
-        datasetMap.put("persistentId", getOwner().getGlobalIdString());
+        datasetMap.put("persistentId", getOwner().getGlobalId().asString());
         datasetMap.put("url", getOwner().getPersistentURL());
         datasetMap.put("version", thisFileMetadata.getDatasetVersion().getSemanticVersion());
         datasetMap.put("id", getOwner().getId());
@@ -1034,9 +1098,17 @@ public String getCreateDateFormattedYYYYMMDD() {
         return null;
     }
     
+    @Override
+    public String getTargetUrl() {
+        return DataFile.TARGET_URL;
+    }
 
+    private boolean tagExists(String tagLabel) {
+        for (DataFileTag dataFileTag : dataFileTags) {
+            if (dataFileTag.getTypeLabel().equals(tagLabel)) {
+                return true;
+            }
+        }
+        return false;
+    }
 } // end of class
-    
-
-    
-
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
index f569a69b13a..f5abe9ac78a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategory.java
@@ -10,16 +10,16 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collection;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java
index 3fa4691a6dd..29dcb22c3ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBean.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java
index 18531f5203d..701e826f12e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileConverter.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 @FacesConverter("dataFileConverter")
 public class DataFileConverter implements Converter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
index 7da06f36be4..c9d50bbed9d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileServiceBean.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.authorization.AccessRequest;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
@@ -9,6 +8,9 @@
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.search.SolrSearchResult;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.storageuse.StorageQuota;
+import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
 import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -27,25 +29,21 @@
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import org.apache.commons.lang3.RandomStringUtils;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
  * @author Leonid Andreev
  * 
- * Basic skeleton of the new DataFile service for DVN 4.0
- * 
  */
 
 @Stateless
@@ -67,13 +65,18 @@ public class DataFileServiceBean implements java.io.Serializable {
 
     @EJB EmbargoServiceBean embargoService;
     
+    @EJB SystemConfig systemConfig;
+    
+    @EJB
+    StorageUseServiceBean storageUseService; 
+    
     @PersistenceContext(unitName = "VDCNet-ejbPU")
     private EntityManager em;
     
     // Assorted useful mime types:
     
-    // 3rd-party and/or proprietary tabular data formasts that we know
+    // 3rd-party and/or proprietary tabular data formats that we know
-    // how to ingest: 
+    // how to ingest:
     
     private static final String MIME_TYPE_STATA = "application/x-stata";
     private static final String MIME_TYPE_STATA13 = "application/x-stata-13";
@@ -154,8 +157,29 @@ public DataFile find(Object pk) {
         
     }*/
     
+    public List<DataFile> findAll(List<Long> fileIds){
+        List<DataFile> dataFiles = new ArrayList<>();
+
+        for (Long fileId : fileIds){
+            dataFiles.add(find(fileId));
+        }
+
+        return dataFiles;
+    }
+
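+    // Convenience overload that accepts a comma-separated string of file ids.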
+    public List<DataFile> findAll(String fileIdsAsString){
+        ArrayList<Long> dataFileIds = new ArrayList<>();
+
+        String[] fileIds = fileIdsAsString.split(",");
+        for (String fId : fileIds){
+            dataFileIds.add(Long.parseLong(fId));
+        }
+
+        return findAll(dataFileIds);
+    }
+    
     public DataFile findByGlobalId(String globalId) {
-            return (DataFile) dvObjectService.findByGlobalId(globalId, DataFile.DATAFILE_DTYPE_STRING);
+            return (DataFile) dvObjectService.findByGlobalId(globalId, DvObject.DType.DataFile);
     }
 
     public List<DataFile> findByCreatorId(Long creatorId) {
@@ -199,6 +223,18 @@ public List<DataFile> findByDatasetId(Long studyId) {
                 .setParameter("studyId", studyId).getResultList();
     }
     
+    /**
+     * 
+     * @param collectionId numeric id of the parent collection ("dataverse")
+     * @return list of files in the datasets that are *direct* children of the collection specified
+     * (i.e., no datafiles in sub-collections of this collection will be included)
+     */
+    public List<DataFile> findByDirectCollectionOwner(Long collectionId) {
+        String queryString = "select f from DataFile f, Dataset d where f.owner.id = d.id and d.owner.id = :collectionId order by f.id";
+        return em.createQuery(queryString, DataFile.class)
+                .setParameter("collectionId", collectionId).getResultList();
+    }
+    
     public List<DataFile> findAllRelatedByRootDatafileId(Long datafileId) {
         /* 
          Get all files with the same root datafile id
@@ -350,6 +386,18 @@ public FileMetadata findMostRecentVersionFileIsIn(DataFile file) {
             return fileMetadatas.get(0);
         }
     }
+    
+    public List<DataFile> findAllCheapAndEasy(String fileIdsAsString){ 
+        //assumption is that the fileIds are separated by ','
+        ArrayList <DataFile> dataFilesFound = new ArrayList<>();
+        String[] fileIds = fileIdsAsString.split(",");
+        for (String fileId : fileIds) {
+            DataFile df = this.findCheapAndEasy(Long.parseLong(fileId));
+            if (df != null) {
+                dataFilesFound.add(df);
+            }
+        }
+
+        return dataFilesFound;
+    }
 
     public DataFile findCheapAndEasy(Long id) {
         DataFile dataFile;
@@ -357,7 +405,7 @@ public DataFile findCheapAndEasy(Long id) {
         Object[] result;
 
         try {
-            result = (Object[]) em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t0.PREVIEWIMAGEAVAILABLE, t1.CONTENTTYPE, t0.STORAGEIDENTIFIER, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t3.ID, t2.AUTHORITY, t2.IDENTIFIER, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID, t0.AUTHORITY, T0.PROTOCOL, T0.IDENTIFIER FROM DVOBJECT t0, DATAFILE t1, DVOBJECT t2, DATASET t3 WHERE ((t0.ID = " + id + ") AND (t0.OWNER_ID = t2.ID) AND (t2.ID = t3.ID) AND (t1.ID = t0.ID))").getSingleResult();
+            result = (Object[]) em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t0.PREVIEWIMAGEAVAILABLE, t1.CONTENTTYPE, t0.STORAGEIDENTIFIER, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t3.ID, t2.AUTHORITY, t2.IDENTIFIER, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID, t0.AUTHORITY, T0.PROTOCOL, T0.IDENTIFIER, t2.PROTOCOL FROM DVOBJECT t0, DATAFILE t1, DVOBJECT t2, DATASET t3 WHERE ((t0.ID = " + id + ") AND (t0.OWNER_ID = t2.ID) AND (t2.ID = t3.ID) AND (t1.ID = t0.ID))").getSingleResult();
         } catch (Exception ex) {
             return null;
         }
@@ -501,7 +549,9 @@ public DataFile findCheapAndEasy(Long id) {
         if (identifier != null) {
             dataFile.setIdentifier(identifier);
         }
-                
+        
+        owner.setProtocol((String) result[25]);
+        
         dataFile.setOwner(owner);
 
         // If content type indicates it's tabular data, spend 2 extra queries 
@@ -559,265 +609,25 @@ public DataFile findCheapAndEasy(Long id) {
         
         return dataFile;
     }
-    /* 
-     * This is an experimental method for populating the versions of 
-     * the datafile with the filemetadatas, optimized for making as few db 
-     * queries as possible. 
-     * It should only be used to retrieve filemetadata for the DatasetPage!
-     * It is not guaranteed to adequately perform anywhere else. 
-    */
-
-    public void findFileMetadataOptimizedExperimental(Dataset owner, DatasetVersion version, AuthenticatedUser au) {
-        List<DataFile> dataFiles = new ArrayList<>();
-        List<DataTable> dataTables = new ArrayList<>();
-        //List<FileMetadata> retList = new ArrayList<>(); 
-        
-        // TODO: 
-        //  replace these maps with simple lists and run binary search on them. -- 4.2.1
-        
-        Map<Long, AuthenticatedUser> userMap = new HashMap<>(); 
-        Map<Long, Integer> filesMap = new HashMap<>();
-        Map<Long, Integer> datatableMap = new HashMap<>();
-        Map<Long, Integer> categoryMap = new HashMap<>();
-        Map<Long, Set<Integer>> fileTagMap = new HashMap<>();
-        List<Long> accessRequestFileIds = new ArrayList();
-        
-        List<String> fileTagLabels = DataFileTag.listTags();
-        
-        
-        int i = 0; 
-        //Cache responses
-        Map<Long, Embargo> embargoMap = new HashMap<Long, Embargo>();
-        
-        List<Object[]> dataTableResults = em.createNativeQuery("SELECT t0.ID, t0.DATAFILE_ID, t0.UNF, t0.CASEQUANTITY, t0.VARQUANTITY, t0.ORIGINALFILEFORMAT, t0.ORIGINALFILESIZE, t0.ORIGINALFILENAME FROM dataTable t0, dataFile t1, dvObject t2 WHERE ((t0.DATAFILE_ID = t1.ID) AND (t1.ID = t2.ID) AND (t2.OWNER_ID = " + owner.getId() + ")) ORDER BY t0.ID").getResultList();
-        
-        for (Object[] result : dataTableResults) {
-            DataTable dataTable = new DataTable(); 
-            long fileId = ((Number) result[1]).longValue();
-
-            dataTable.setId(((Number) result[1]).longValue());
-            
-            dataTable.setUnf((String)result[2]);
-            
-            dataTable.setCaseQuantity((Long)result[3]);
-            
-            dataTable.setVarQuantity((Long)result[4]);
-            
-            dataTable.setOriginalFileFormat((String)result[5]);
-            
-            dataTable.setOriginalFileSize((Long)result[6]);
-            
-            dataTable.setOriginalFileName((String)result[7]);
-            
-            dataTables.add(dataTable);
-            datatableMap.put(fileId, i++);
-            
-        }
-        
-        logger.fine("Retrieved "+dataTables.size()+" DataTable objects.");
-         
-        List<Object[]> dataTagsResults = em.createNativeQuery("SELECT t0.DATAFILE_ID, t0.TYPE FROM DataFileTag t0, dvObject t1 WHERE (t1.ID = t0.DATAFILE_ID) AND (t1.OWNER_ID="+ owner.getId() + ")").getResultList();
-        for (Object[] result : dataTagsResults) {
-            Long datafile_id = (Long) result[0];
-            Integer tagtype_id = (Integer) result[1];
-            if (fileTagMap.get(datafile_id) == null) {
-                fileTagMap.put(datafile_id, new HashSet<>());
-            }
-            fileTagMap.get(datafile_id).add(tagtype_id);
-        }
-        logger.fine("Retrieved "+dataTagsResults.size()+" data tags.");
-        dataTagsResults = null;
-
-        //Only need to check for access requests if there is an authenticated user       
-        if (au != null) {
-            List<Object> accessRequests = em.createNativeQuery("SELECT t0.ID FROM DVOBJECT t0, FILEACCESSREQUESTS t1 WHERE t1.datafile_id = t0.id and t0.OWNER_ID = " + owner.getId() + "  and t1.AUTHENTICATED_USER_ID = " + au.getId() + " ORDER BY t0.ID").getResultList();
-            for (Object result : accessRequests) {               
-                accessRequestFileIds.add(Long.valueOf((Integer)result));
-            }
-            logger.fine("Retrieved " + accessRequests.size() + " access requests.");           
-            accessRequests = null;
-        }
-
-        i = 0;
-        
-        List<Object[]> fileResults = em.createNativeQuery("SELECT t0.ID, t0.CREATEDATE, t0.INDEXTIME, t0.MODIFICATIONTIME, t0.PERMISSIONINDEXTIME, t0.PERMISSIONMODIFICATIONTIME, t0.PUBLICATIONDATE, t0.CREATOR_ID, t0.RELEASEUSER_ID, t1.CONTENTTYPE, t0.STORAGEIDENTIFIER, t1.FILESIZE, t1.INGESTSTATUS, t1.CHECKSUMVALUE, t1.RESTRICTED, t1.CHECKSUMTYPE, t1.PREVIOUSDATAFILEID, t1.ROOTDATAFILEID, t0.PROTOCOL, t0.AUTHORITY, t0.IDENTIFIER, t1.EMBARGO_ID FROM DVOBJECT t0, DATAFILE t1 WHERE ((t0.OWNER_ID = " + owner.getId() + ") AND ((t1.ID = t0.ID) AND (t0.DTYPE = 'DataFile'))) ORDER BY t0.ID").getResultList(); 
     
-        for (Object[] result : fileResults) {
-            Integer file_id = (Integer) result[0];
-            
-            DataFile dataFile = new DataFile();
-            dataFile.setMergeable(false);
-            
-            dataFile.setId(file_id.longValue());
-            
-            Timestamp createDate = (Timestamp) result[1];
-            Timestamp indexTime = (Timestamp) result[2];
-            Timestamp modificationTime = (Timestamp) result[3];
-            Timestamp permissionIndexTime = (Timestamp) result[4];
-            Timestamp permissionModificationTime = (Timestamp) result[5];
-            Timestamp publicationDate = (Timestamp) result[6];
-            
-            dataFile.setCreateDate(createDate);
-            dataFile.setIndexTime(indexTime);
-            dataFile.setModificationTime(modificationTime);
-            dataFile.setPermissionIndexTime(permissionIndexTime);
-            dataFile.setPermissionModificationTime(permissionModificationTime);
-            dataFile.setPublicationDate(publicationDate);
-            
-            Long creatorId = (Long) result[7]; 
-            if (creatorId != null) {
-                AuthenticatedUser creator = userMap.get(creatorId);
-                if (creator == null) {
-                    creator = userService.find(creatorId);
-                    if (creator != null) {
-                        userMap.put(creatorId, creator);
-                    }
-                }
-                if (creator != null) {
-                    dataFile.setCreator(creator);
-                }
-            }
-            
-            dataFile.setOwner(owner);
-            
-            Long releaseUserId = (Long) result[8]; 
-            if (releaseUserId != null) {
-                AuthenticatedUser releaseUser = userMap.get(releaseUserId);
-                if (releaseUser == null) {
-                    releaseUser = userService.find(releaseUserId);
-                    if (releaseUser != null) {
-                        userMap.put(releaseUserId, releaseUser);
-                    }
-                }
-                if (releaseUser != null) {
-                    dataFile.setReleaseUser(releaseUser);
-                }
-            }
-            
-            String contentType = (String) result[9]; 
-            
-            if (contentType != null) {
-                dataFile.setContentType(contentType);
-            }
-            
-            String storageIdentifier = (String) result[10];
-            
-            if (storageIdentifier != null) {
-                dataFile.setStorageIdentifier(storageIdentifier);
-            }
-            
-            Long fileSize = (Long) result[11];
-            
-            if (fileSize != null) {
-                dataFile.setFilesize(fileSize);
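+    // Looks up the AuthenticatedUsers that have an open (CREATED) access request on the given file.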
+    private List<AuthenticatedUser> retrieveFileAccessRequesters(DataFile fileIn) {
+        List<AuthenticatedUser> retList = new ArrayList<>();
+
+        // List<Object> requesters = em.createNativeQuery("select authenticated_user_id
+        // from fileaccessrequests where datafile_id =
+        // "+fileIn.getId()).getResultList();
+        TypedQuery<Long> typedQuery = em.createQuery("select f.user.id from FileAccessRequest f where f.dataFile.id = :file_id and f.requestState= :requestState", Long.class);
+        typedQuery.setParameter("file_id", fileIn.getId());
+        typedQuery.setParameter("requestState", FileAccessRequest.RequestState.CREATED);
+        List<Long> requesters = typedQuery.getResultList();
+        for (Long userId : requesters) {
+            AuthenticatedUser user = userService.find(userId);
+            if (user != null) {
+                retList.add(user);
             }
-            
-            if (result[12] != null) {
-                String ingestStatusString = (String) result[12];
-                dataFile.setIngestStatus(ingestStatusString.charAt(0));
-            }
-            
-            String md5 = (String) result[13]; 
-            
-            if (md5 != null) {
-                dataFile.setChecksumValue(md5);
-            }
-            
-            Boolean restricted = (Boolean) result[14];
-            if (restricted != null) {
-                dataFile.setRestricted(restricted);
-            }
-
-            String checksumType = (String) result[15];
-            if (checksumType != null) {
-                try {
-                    // In the database we store "SHA1" rather than "SHA-1".
-                    DataFile.ChecksumType typeFromStringInDatabase = DataFile.ChecksumType.valueOf(checksumType);
-                    dataFile.setChecksumType(typeFromStringInDatabase);
-                } catch (IllegalArgumentException ex) {
-                    logger.info("Exception trying to convert " + checksumType + " to enum: " + ex);
-                }
-            }
-
-            Long previousDataFileId = (Long) result[16];
-            if (previousDataFileId != null) {
-                dataFile.setPreviousDataFileId(previousDataFileId);
-            }
-            
-            Long rootDataFileId = (Long) result[17];
-            if (rootDataFileId != null) {
-                dataFile.setRootDataFileId(rootDataFileId);
-            }
-            
-            String protocol = (String) result[18];
-            if (protocol != null) {
-                dataFile.setProtocol(protocol);
-            }
-            
-            String authority = (String) result[19];
-            if (authority != null) {
-                dataFile.setAuthority(authority);
-            }
-            
-            String identifier = (String) result[20];
-            if (identifier != null) {
-                dataFile.setIdentifier(identifier);
-            }
-            
-            Long embargo_id = (Long) result[21];
-            if (embargo_id != null) {
-                if (embargoMap.containsKey(embargo_id)) {
-                    dataFile.setEmbargo(embargoMap.get(embargo_id));
-                } else {
-                    Embargo e = embargoService.findByEmbargoId(embargo_id);
-                    dataFile.setEmbargo(e);
-                    embargoMap.put(embargo_id, e);
-                }
-            }
-            
-            // TODO: 
-            // - if ingest status is "bad", look up the ingest report; 
-            // - is it a dedicated thumbnail for the dataset? (do we ever need that info?? - not on the dataset page, I don't think...)
-            
-            // Is this a tabular file? 
-            
-            if (datatableMap.get(dataFile.getId()) != null) {
-                dataTables.get(datatableMap.get(dataFile.getId())).setDataFile(dataFile);
-                dataFile.setDataTable(dataTables.get(datatableMap.get(dataFile.getId())));
-                
-            }            
-
-            if (fileTagMap.get(dataFile.getId()) != null) {
-                for (Integer tag_id : fileTagMap.get(dataFile.getId())) {
-                    DataFileTag tag = new DataFileTag();
-                    tag.setTypeByLabel(fileTagLabels.get(tag_id));
-                    tag.setDataFile(dataFile);
-                    dataFile.addTag(tag);
-                }
-            } 
-            
-            if (dataFile.isRestricted() && accessRequestFileIds.contains(dataFile.getId())) {
-                dataFile.setFileAccessRequesters(Collections.singletonList(au));
-            } 
-
-            dataFiles.add(dataFile);
-            filesMap.put(dataFile.getId(), i++);
-        }
-        fileResults = null;
-        
-        logger.fine("Retrieved and cached "+i+" datafiles.");
-
-        i = 0; 
-        for (DataFileCategory fileCategory : owner.getCategories()) {
-            //logger.fine("category: id="+fileCategory.getId());
-            categoryMap.put(fileCategory.getId(), i++);
         }
-        
-        logger.fine("Retrieved "+i+" file categories attached to the dataset.");
 
-        version.setFileMetadatas(retrieveFileMetadataForVersion(owner, version, dataFiles, filesMap, categoryMap));
-        logger.fine("Retrieved " + version.getFileMetadatas().size() + " filemetadatas for the version " + version.getId());
-        owner.setFiles(dataFiles);
+        return retList;
     }
     
     private List<FileMetadata> retrieveFileMetadataForVersion(Dataset dataset, DatasetVersion version, List<DataFile> dataFiles, Map<Long, Integer> filesMap, Map<Long, Integer> categoryMap) {
@@ -1126,7 +936,7 @@ public boolean isThumbnailAvailable (DataFile file) {
         }
         
         // If thumbnails are not even supported for this class of files, 
-        // there's notthing to talk about:      
+        // there's nothing to talk about:      
         if (!FileUtil.isThumbnailSupported(file)) {
             return false;
         }
@@ -1141,16 +951,16 @@ public boolean isThumbnailAvailable (DataFile file) {
          is more important... 
         
         */
-                
         
-       if (ImageThumbConverter.isThumbnailAvailable(file)) {
-           file = this.find(file.getId());
-           file.setPreviewImageAvailable(true);
-           this.save(file); 
-           return true;
-       }
-
-       return false;
+        file = this.find(file.getId());
+        if (ImageThumbConverter.isThumbnailAvailable(file)) {
+            file.setPreviewImageAvailable(true);
+            this.save(file);
+            return true;
+        }
+        file.setPreviewImageFail(true);
+        this.save(file);
+        return false;
     }
 
     
@@ -1427,75 +1237,6 @@ public List<Long> selectFilesWithMissingOriginalSizes() {
         }
     }
     
-    public String generateDataFileIdentifier(DataFile datafile, GlobalIdServiceBean idServiceBean) {
-        String doiIdentifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString");
-        String doiDataFileFormat = settingsService.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT");
-
-        String prepend = "";
-        if (doiDataFileFormat.equals(SystemConfig.DataFilePIDFormat.DEPENDENT.toString())){
-            //If format is dependent then pre-pend the dataset identifier 
-            prepend = datafile.getOwner().getIdentifier() + "/";
-        } else {
-            //If there's a shoulder prepend independent identifiers with it
-        	prepend = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, "");
-        }
- 
-        switch (doiIdentifierType) {
-            case "randomString":               
-                return generateIdentifierAsRandomString(datafile, idServiceBean, prepend);
-            case "storedProcGenerated":
-                if (doiDataFileFormat.equals(SystemConfig.DataFilePIDFormat.INDEPENDENT.toString())){ 
-                    return generateIdentifierFromStoredProcedureIndependent(datafile, idServiceBean, prepend);
-                } else {
-                    return generateIdentifierFromStoredProcedureDependent(datafile, idServiceBean, prepend);
-                }
-            default:
-                /* Should we throw an exception instead?? -- L.A. 4.6.2 */
-                return generateIdentifierAsRandomString(datafile, idServiceBean, prepend);
-        }
-    }
-    
-    private String generateIdentifierAsRandomString(DataFile datafile, GlobalIdServiceBean idServiceBean, String prepend) {
-        String identifier = null;
-        do {
-            identifier = prepend + RandomStringUtils.randomAlphanumeric(6).toUpperCase();  
-        } while (!isGlobalIdUnique(identifier, datafile, idServiceBean));
-
-        return identifier;
-    }
-
-
-    private String generateIdentifierFromStoredProcedureIndependent(DataFile datafile, GlobalIdServiceBean idServiceBean, String prepend) {
-        String identifier; 
-        do {
-            StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure");
-            query.execute();
-            String identifierFromStoredProcedure = (String) query.getOutputParameterValue(1);
-            // some diagnostics here maybe - is it possible to determine that it's failing 
-            // because the stored procedure hasn't been created in the database?
-            if (identifierFromStoredProcedure == null) {
-                return null; 
-            }
-            identifier = prepend + identifierFromStoredProcedure;
-        } while (!isGlobalIdUnique(identifier, datafile, idServiceBean));
-        
-        return identifier;
-    }
-    
-    private String generateIdentifierFromStoredProcedureDependent(DataFile datafile, GlobalIdServiceBean idServiceBean, String prepend) {
-        String identifier;
-        Long retVal;
-
-        retVal = new Long(0);
-
-        do {
-            retVal++;
-            identifier = prepend + retVal.toString();
-
-        } while (!isGlobalIdUnique(identifier, datafile, idServiceBean));
-
-        return identifier;
-    }
 
     /**
      * Check that a identifier entered by the user is unique (not currently used
@@ -1506,38 +1247,6 @@ private String generateIdentifierFromStoredProcedureDependent(DataFile datafile,
      * @param idServiceBean
      * @return  {@code true} iff the global identifier is unique.
      */
-    public boolean isGlobalIdUnique(String userIdentifier, DataFile datafile, GlobalIdServiceBean idServiceBean) {
-        String testProtocol = "";
-        String testAuthority = "";
-        if (datafile.getAuthority() != null){
-            testAuthority = datafile.getAuthority();
-        } else {
-            testAuthority = settingsService.getValueForKey(SettingsServiceBean.Key.Authority);
-        }
-        if (datafile.getProtocol() != null){
-            testProtocol = datafile.getProtocol();
-        } else {
-            testProtocol = settingsService.getValueForKey(SettingsServiceBean.Key.Protocol);
-        }
-        
-        boolean u = em.createNamedQuery("DvObject.findByProtocolIdentifierAuthority")
-            .setParameter("protocol", testProtocol)
-            .setParameter("authority", testAuthority)
-            .setParameter("identifier",userIdentifier)
-            .getResultList().isEmpty();
-            
-        try{
-            if (idServiceBean.alreadyExists(new GlobalId(testProtocol, testAuthority, userIdentifier))) {
-                u = false;
-            }
-        } catch (Exception e){
-            //we can live with failure - means identifier not found remotely
-        }
-
-       
-        return u;
-    }
-    
     public void finalizeFileDelete(Long dataFileId, String storageLocation) throws IOException {
         // Verify that the DataFile no longer exists: 
         if (find(dataFileId) != null) {
@@ -1657,4 +1366,39 @@ public Embargo findEmbargo(Long id) {
         DataFile d = find(id);
         return d.getEmbargo();
     }
+    
+    /**
+     * Checks if the supplied DvObjectContainer (Dataset or Collection; although
+     * only collection-level storage quotas are officially supported as of now)
+     * has a quota configured, and if not, keeps checking if any of the direct
+     * ancestor Collections further up have a configured quota. If it finds one, 
+     * it will retrieve the current total content size for that specific ancestor 
+     * dvObjectContainer and use it to define the quota limit for the upload
+     * session in progress. 
+     * 
+     * @param parent - DvObjectContainer, Dataset or Collection
+     * @return upload session size limit spec, or null if quota not defined on 
+     * any of the ancestor DvObjectContainers
+     */
+    public UploadSessionQuotaLimit getUploadSessionQuotaLimit(DvObjectContainer parent) {
+        DvObjectContainer testDvContainer = parent; 
+        StorageQuota quota = testDvContainer.getStorageQuota();
+        while (quota == null && testDvContainer.getOwner() != null) {
+            testDvContainer = testDvContainer.getOwner();
+            quota = testDvContainer.getStorageQuota();
+            if (quota != null) {
+                break;
+            }
+        }    
+        if (quota == null || quota.getAllocation() == null) {
+            return null; 
+        }
+        
+        // Note that we are checking the recorded storage use not on the 
+        // immediate parent necessarily, but on the specific ancestor 
+        // DvObjectContainer on which the storage quota is defined:
+        Long currentSize = storageUseService.findStorageSizeByDvContainerId(testDvContainer.getId()); 
+        
+        return new UploadSessionQuotaLimit(quota.getAllocation(), currentSize);
+    }
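+
+    // Hypothetical usage sketch (the caller-side names here are assumptions, not part of this bean):
+    //   UploadSessionQuotaLimit limit = fileService.getUploadSessionQuotaLimit(dataset);
+    //   if (limit != null) { compare the bytes about to be uploaded against the remaining allocation }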
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
index 275d47cf1de..351c4032939 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java
@@ -11,15 +11,15 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -58,7 +58,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa
     
     private static final Map<TagType, String> TagTypeToLabels = new HashMap<>();
     
-    private static final Map<String, TagType> TagLabelToTypes = new HashMap<>();
+    public static final Map<String, TagType> TagLabelToTypes = new HashMap<>();
     
     
     static {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTable.java b/src/main/java/edu/harvard/iq/dataverse/DataTable.java
index 614e7394583..a17d8c65138 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTable.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTable.java
@@ -7,26 +7,23 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.validation.constraints.Size;
-import javax.persistence.OrderBy;
-import org.hibernate.validator.constraints.NotBlank;
-import org.hibernate.validator.constraints.URL;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.validation.constraints.Size;
+import jakarta.persistence.OrderBy;
 
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Index;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Index;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java b/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java
index 2f987dde82b..713c86190fc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTagsAPITestingBean.java
@@ -5,11 +5,11 @@
 import java.io.Serializable;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java b/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java
index 5cf9c623bde..eeda70c1f17 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataTagsContainer.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.Stateless;
-import javax.json.JsonObject;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObject;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index d7e7271738d..a2f560bc959 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -17,24 +17,25 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.NamedStoredProcedureQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.ParameterMode;
-import javax.persistence.StoredProcedureParameter;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.NamedStoredProcedureQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.ParameterMode;
+import jakarta.persistence.StoredProcedureParameter;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.storageuse.StorageUse;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 
@@ -43,6 +44,10 @@
  * @author skraffmiller
  */
 @NamedQueries({
+    // Dataset.findById should only be used if you're going to iterate over files (otherwise, lazy loading in DatasetService.find() is better).
+    // If you are going to iterate over files, preferably call the DatasetService.findDeep() method instead of using this query directly.
+    @NamedQuery(name = "Dataset.findById", 
+                query = "SELECT o FROM Dataset o LEFT JOIN FETCH o.files WHERE o.id=:id"),
     @NamedQuery(name = "Dataset.findIdStale",
                query = "SELECT d.id FROM Dataset d WHERE d.indexTime is NULL OR d.indexTime < d.modificationTime"),
     @NamedQuery(name = "Dataset.findIdStalePermission",
@@ -154,6 +159,23 @@ public void setCitationDateDatasetFieldType(DatasetFieldType citationDateDataset
         this.citationDateDatasetFieldType = citationDateDatasetFieldType;
     }    
 
+    // Per DataCite best practices, the citation date of a dataset may need 
+    // to be adjusted to reflect the latest embargo availability date of any 
+    // file within the first published version. 
+    // If any files are embargoed in the first version, this date will be
+    // calculated and cached here upon its publication, in the 
+    // FinalizeDatasetPublicationCommand. 
+    private Timestamp embargoCitationDate;
+    
+    public Timestamp getEmbargoCitationDate() {
+        return embargoCitationDate;
+    }
+
+    public void setEmbargoCitationDate(Timestamp embargoCitationDate) {
+        this.embargoCitationDate = embargoCitationDate;
+    }
+    
+    
     
     @ManyToOne
     @JoinColumn(name="template_id",nullable = true)
@@ -168,6 +190,10 @@ public void setTemplate(Template template) {
     }
 
     public Dataset() {
+        this(false);
+    }
+    
+    public Dataset(boolean isHarvested) {
         DatasetVersion datasetVersion = new DatasetVersion();
         datasetVersion.setDataset(this);
         datasetVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
@@ -175,6 +201,11 @@ public Dataset() {
         datasetVersion.setVersionNumber((long) 1);
         datasetVersion.setMinorVersionNumber((long) 0);
         versions.add(datasetVersion);
+        
+        if (!isHarvested) {
+            StorageUse storageUse = new StorageUse(this); 
+            this.setStorageUse(storageUse);
+        }
     }
     
     /**
@@ -258,7 +289,7 @@ public void setFileAccessRequest(boolean fileAccessRequest) {
     }
 
     public String getPersistentURL() {
-        return new GlobalId(this).toURL().toString();
+        return this.getGlobalId().asURL();
     }
     
     public List<DataFile> getFiles() {
@@ -672,20 +703,10 @@ public Timestamp getCitationDate() {
         Timestamp citationDate = null;
         //Only calculate if this dataset doesn't use an alternate date field for publication date
         if (citationDateDatasetFieldType == null) {
-            List<DatasetVersion> versions = this.versions;
-            // TODo - is this ever not version 1.0 (or draft if not published yet)
-            DatasetVersion oldest = versions.get(versions.size() - 1);
             citationDate = super.getPublicationDate();
-            if (oldest.isPublished()) {
-                List<FileMetadata> fms = oldest.getFileMetadatas();
-                for (FileMetadata fm : fms) {
-                    Embargo embargo = fm.getDataFile().getEmbargo();
-                    if (embargo != null) {
-                        Timestamp embDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay());
-                        if (citationDate.compareTo(embDate) < 0) {
-                            citationDate = embDate;
-                        }
-                    }
+            if (embargoCitationDate != null) {
+                if (citationDate.compareTo(embargoCitationDate) < 0) {
+                    return embargoCitationDate;
                 }
             }
         }
@@ -765,13 +786,13 @@ public String getLocalURL() {
     public String getRemoteArchiveURL() {
         if (isHarvested()) {
             if (HarvestingClient.HARVEST_STYLE_DATAVERSE.equals(this.getHarvestedFrom().getHarvestStyle())) {
-                return this.getHarvestedFrom().getArchiveUrl() + "/dataset.xhtml?persistentId=" + getGlobalIdString();
+                return this.getHarvestedFrom().getArchiveUrl() + "/dataset.xhtml?persistentId=" + getGlobalId().asString();
             } else if (HarvestingClient.HARVEST_STYLE_VDC.equals(this.getHarvestedFrom().getHarvestStyle())) {
                 String rootArchiveUrl = this.getHarvestedFrom().getHarvestingUrl();
                 int c = rootArchiveUrl.indexOf("/OAIHandler");
                 if (c > 0) {
                     rootArchiveUrl = rootArchiveUrl.substring(0, c);
-                    return rootArchiveUrl + "/faces/study/StudyPage.xhtml?globalId=" + getGlobalIdString();
+                    return rootArchiveUrl + "/faces/study/StudyPage.xhtml?globalId=" + getGlobalId().asString();
                 }
             } else if (HarvestingClient.HARVEST_STYLE_ICPSR.equals(this.getHarvestedFrom().getHarvestStyle())) {
                 // For the ICPSR, it turns out that the best thing to do is to 
@@ -858,6 +879,12 @@ public String getHarvestingDescription() {
         return null;
     }
 
+    public boolean hasEnabledGuestbook(){
+        Guestbook gb = this.getGuestbook();
+
+        return ( gb != null && gb.isEnabled());
+    }
+    
     @Override
     public boolean equals(Object object) {
         // TODO: Warning - this method won't work in the case the id fields are not set
@@ -881,7 +908,12 @@ public <T> T accept(Visitor<T> v) {
     @Override
     public String getDisplayName() {
         DatasetVersion dsv = getReleasedVersion();
-        return dsv != null ? dsv.getTitle() : getLatestVersion().getTitle();
+        String result = dsv != null ? dsv.getTitle() : getLatestVersion().getTitle();
+        boolean resultIsEmpty = result == null || "".equals(result);
+        if (resultIsEmpty && getGlobalId() != null) {
+            return getGlobalId().asString();
+        }
+        return result;
     }
     
     @Override
@@ -915,4 +947,8 @@ public DatasetThumbnail getDatasetThumbnail(DatasetVersion datasetVersion, int s
         return DatasetUtil.getThumbnail(this, datasetVersion, size);
     }
 
+    @Override
+    public String getTargetUrl() {
+        return Dataset.TARGET_URL;
+    }
 }
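
The refactored getCitationDate() above now relies on the cached embargoCitationDate instead of scanning all file metadata on every call. Below is a minimal standalone sketch (not part of the patch; class name and dates are invented) of the intended comparison: the later embargo availability date, cached at publication time, wins over the publication date.

import java.sql.Timestamp;
import java.time.LocalDate;

public class CitationDateSketch {
    public static void main(String[] args) {
        // Publication date of the first released version.
        Timestamp publicationDate = Timestamp.valueOf(LocalDate.of(2023, 1, 15).atStartOfDay());
        // Latest embargo availability date among files in that version,
        // cached on the dataset when it was published.
        Timestamp embargoCitationDate = Timestamp.valueOf(LocalDate.of(2024, 6, 1).atStartOfDay());

        Timestamp citationDate = publicationDate;
        // If any file was embargoed and its availability date is later,
        // that date becomes the citation date.
        if (embargoCitationDate != null && citationDate.compareTo(embargoCitationDate) < 0) {
            citationDate = embargoCitationDate;
        }
        System.out.println("Citation date: " + citationDate); // 2024-06-01 00:00:00.0
    }
}
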
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java b/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java
index 2d19cf5fe06..b779e084250 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetConverter.java
@@ -6,12 +6,12 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 @FacesConverter("datasetConverter")
 public class DatasetConverter implements Converter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java b/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java
index 00936b9365a..3252b7f0367 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetDistributor.java
@@ -7,7 +7,7 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.Comparator;
-import javax.persistence.Version;
+import jakarta.persistence.Version;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
index 31d08f84c02..c836a20893f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java
@@ -19,20 +19,20 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 import org.apache.commons.lang3.StringUtils;
 
 @Entity
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java
index 5d83f1e4f8d..c679cd7edad 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldCompoundValue.java
@@ -14,17 +14,17 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
index 6d26c0cba58..1621b80df55 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldConstant.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.enterprise.context.Dependent;
-import javax.inject.Named;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Named;
 
 /**
  *
@@ -112,8 +112,8 @@ public class DatasetFieldConstant implements java.io.Serializable  {
     public final static String geographicUnit="geographicUnit";
     public final static String westLongitude="westLongitude";
     public final static String eastLongitude="eastLongitude";
-    public final static String northLatitude="northLongitude"; //Changed to match DB - incorrectly entered into DB
-    public final static String southLatitude="southLongitude"; //Incorrect in DB
+    public final static String northLatitude="northLongitude"; //Changed to match DB - incorrectly entered into DB: https://github.com/IQSS/dataverse/issues/5645
+    public final static String southLatitude="southLongitude"; //Incorrect in DB: https://github.com/IQSS/dataverse/issues/5645
     public final static String unitOfAnalysis="unitOfAnalysis";
     public final static String universe="universe";
     public final static String kindOfData="kindOfData";
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
index bad482dbca9..8ac98500890 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java
@@ -8,18 +8,18 @@
 
 import java.io.Serializable;
 import java.util.Collection;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
 
 /**
  *
@@ -113,7 +113,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetField)) {
+        if (!(object instanceof DatasetFieldDefaultValue)) {
             return false;
         }
         DatasetFieldDefaultValue other = (DatasetFieldDefaultValue) object;
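
The one-line change above fixes a copy-pasted equals() whose instanceof check tested the wrong class, so the method could never return true for two DatasetFieldDefaultValue objects and would throw ClassCastException when handed an actual DatasetField. A minimal, hypothetical standalone sketch of the corrected pattern (names are illustrative, not taken from the patch):

import java.util.Objects;

// Sketch of a common equals() pattern for id-based entities:
// guard with instanceof against the entity's own class, then compare ids.
class ExampleEntity {
    private final Long id;

    ExampleEntity(Long id) { this.id = id; }

    @Override
    public boolean equals(Object object) {
        if (!(object instanceof ExampleEntity)) { // must check this class, not a sibling class
            return false;
        }
        ExampleEntity other = (ExampleEntity) object;
        return Objects.equals(this.id, other.id);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(id);
    }

    public static void main(String[] args) {
        System.out.println(new ExampleEntity(1L).equals(new ExampleEntity(1L))); // true
        System.out.println(new ExampleEntity(1L).equals(new ExampleEntity(2L))); // false
    }
}
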
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
index 9bc5a5c09a7..ce2b00086ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java
@@ -17,22 +17,24 @@
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonException;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonException;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.httpclient.HttpException;
@@ -343,33 +345,33 @@ public Map<Long, JsonObject> getCVocConf(boolean byTermUriField){
     public void registerExternalVocabValues(DatasetField df) {
         DatasetFieldType dft =df.getDatasetFieldType(); 
         logger.fine("Registering for field: " + dft.getName());
-        JsonObject cvocEntry = getCVocConf(false).get(dft.getId());
-        if(dft.isPrimitive()) {
-            for(DatasetFieldValue dfv: df.getDatasetFieldValues()) {
+        JsonObject cvocEntry = getCVocConf(true).get(dft.getId());
+        if (dft.isPrimitive()) {
+            for (DatasetFieldValue dfv : df.getDatasetFieldValues()) {
                 registerExternalTerm(cvocEntry, dfv.getValue());
             }
-            } else {
-                if (df.getDatasetFieldType().isCompound()) {
-                    DatasetFieldType termdft = findByNameOpt(cvocEntry.getString("term-uri-field"));
-                    for (DatasetFieldCompoundValue cv : df.getDatasetFieldCompoundValues()) {
-                        for (DatasetField cdf : cv.getChildDatasetFields()) {
-                            logger.fine("Found term uri field type id: " + cdf.getDatasetFieldType().getId());
-                            if(cdf.getDatasetFieldType().equals(termdft)) {
-                                registerExternalTerm(cvocEntry, cdf.getValue());
-                            }
+        } else {
+            if (df.getDatasetFieldType().isCompound()) {
+                DatasetFieldType termdft = findByNameOpt(cvocEntry.getString("term-uri-field"));
+                for (DatasetFieldCompoundValue cv : df.getDatasetFieldCompoundValues()) {
+                    for (DatasetField cdf : cv.getChildDatasetFields()) {
+                        logger.fine("Found term uri field type id: " + cdf.getDatasetFieldType().getId());
+                        if (cdf.getDatasetFieldType().equals(termdft)) {
+                            registerExternalTerm(cvocEntry, cdf.getValue());
                         }
                     }
                 }
             }
+        }
     }
     
     /**
      * Retrieves indexable strings from a cached externalvocabularyvalue entry.
      * 
      * This method assumes externalvocabularyvalue entries have been filtered and
-     * the externalvocabularyvalue entry contain a single JsonObject whose values
-     * are either Strings or an array of objects with "lang" and "value" keys. The
-     * string, or the "value"s for each language are added to the set.
+     * the externalvocabularyvalue entry contains a single JsonObject whose "personName" or "termName" values
+     * are either Strings or an array of objects with "lang" and ("value" or "content") keys. The
+     * string, or the "value"/"content" for each language, is added to the set.
      * 
      * Any parsing error results in no entries (there can be unfiltered entries with
      * unknown structure - getting some strings from such an entry could give fairly
@@ -385,16 +387,25 @@ public Set<String> getStringsFor(String termUri) {
         if (jo != null) {
             try {
                 for (String key : jo.keySet()) {
-                    JsonValue jv = jo.get(key);
-                    if (jv.getValueType().equals(JsonValue.ValueType.STRING)) {
-                        logger.fine("adding " + jo.getString(key) + " for " + termUri);
-                        strings.add(jo.getString(key));
-                    } else {
-                        if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) {
-                            JsonArray jarr = jv.asJsonArray();
-                            for (int i = 0; i < jarr.size(); i++) {
-                                logger.fine("adding " + jarr.getJsonObject(i).getString("value") + " for " + termUri);
-                                strings.add(jarr.getJsonObject(i).getString("value"));
+                    if (key.equals("termName") || key.equals("personName")) {
+                        JsonValue jv = jo.get(key);
+                        if (jv.getValueType().equals(JsonValue.ValueType.STRING)) {
+                            logger.fine("adding " + jo.getString(key) + " for " + termUri);
+                            strings.add(jo.getString(key));
+                        } else {
+                            if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) {
+                                JsonArray jarr = jv.asJsonArray();
+                                for (int i = 0; i < jarr.size(); i++) {
+                                    JsonObject entry = jarr.getJsonObject(i);
+                                    if (entry.containsKey("value")) {
+                                        logger.fine("adding " + entry.getString("value") + " for " + termUri);
+                                        strings.add(entry.getString("value"));
+                                    } else if (entry.containsKey("content")) {
+                                        logger.fine("adding " + entry.getString("content") + " for " + termUri);
+                                        strings.add(entry.getString("content"));
+
+                                    }
+                                }
                             }
                         }
                     }
@@ -410,7 +421,7 @@ public Set<String> getStringsFor(String termUri) {
     }    
 
     /**
-     * Perform a query to retrieve a cached valie from the externalvocabularvalue table
+     * Perform a query to retrieve a cached value from the externalvocabularyvalue table
      * @param termUri
      * @return - the entry's value as a JsonObject
      */
@@ -444,9 +455,25 @@ public void registerExternalTerm(JsonObject cvocEntry, String term) {
             logger.fine("Ingoring blank term");
             return;
         }
+        boolean isExternal = false;
+        JsonObject vocabs = cvocEntry.getJsonObject("vocabs");
+        for (String key: vocabs.keySet()) {
+            JsonObject vocab = vocabs.getJsonObject(key);
+            if (vocab.containsKey("uriSpace")) {
+                if (term.startsWith(vocab.getString("uriSpace"))) {
+                    isExternal = true;
+                    break;
+                }
+            }
+        }
+        if (!isExternal) {
+            logger.fine("Ignoring free text entry: " + term);
+            return;
+        }
         logger.fine("Registering term: " + term);
         try {
-            URI uri = new URI(term);
+            // Ensure the term is in URI form - it should be if the uriSpace entry was correct
+            new URI(term);
             ExternalVocabularyValue evv = null;
             try {
                 evv = em.createQuery("select object(o) from ExternalVocabularyValue as o where o.uri=:uri",
@@ -473,7 +500,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep
                         .setRetryHandler(new DefaultHttpRequestRetryHandler(3, false))
                         .build()) {
                     HttpGet httpGet = new HttpGet(retrievalUri);
-                    httpGet.addHeader("Accept", "application/json+ld, application/json");
+                    //application/json+ld is for backward compatibility
+                    httpGet.addHeader("Accept", "application/ld+json, application/json+ld, application/json");
 
                     HttpResponse response = httpClient.execute(httpGet);
                     String data = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8);
@@ -542,37 +570,7 @@ private JsonObject filterResponse(JsonObject cvocEntry, JsonObject readObject, S
                             String[] pathParts = param.split("/");
                             logger.fine("PP: " + String.join(", ", pathParts));
                             JsonValue curPath = readObject;
-                            for (int j = 0; j < pathParts.length - 1; j++) {
-                                if (pathParts[j].contains("=")) {
-                                    JsonArray arr = ((JsonArray) curPath);
-                                    for (int k = 0; k < arr.size(); k++) {
-                                        String[] keyVal = pathParts[j].split("=");
-                                        logger.fine("Looking for object where " + keyVal[0] + " is " + keyVal[1]);
-                                        JsonObject jo = arr.getJsonObject(k);
-                                        String val = jo.getString(keyVal[0]);
-                                        String expected = keyVal[1];
-                                        if (expected.equals("@id")) {
-                                            expected = termUri;
-                                        }
-                                        if (val.equals(expected)) {
-                                            logger.fine("Found: " + jo.toString());
-                                            curPath = jo;
-                                            break;
-                                        }
-                                    }
-                                } else {
-                                    curPath = ((JsonObject) curPath).get(pathParts[j]);
-                                    logger.fine("Found next Path object " + curPath.toString());
-                                }
-                            }
-                            JsonValue jv = ((JsonObject) curPath).get(pathParts[pathParts.length - 1]);
-                            if (jv.getValueType().equals(JsonValue.ValueType.STRING)) {
-                                vals.add(i, ((JsonString) jv).getString());
-                            } else if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) {
-                                vals.add(i, jv);
-                            } else if (jv.getValueType().equals(JsonValue.ValueType.OBJECT)) {
-                                vals.add(i, jv);
-                            }
+                            vals.add(i, processPathSegment(0, pathParts, curPath, termUri));
                             logger.fine("Added param value: " + i + ": " + vals.get(i));
                         } else {
                             logger.fine("Param is: " + param);
@@ -615,6 +613,7 @@ private JsonObject filterResponse(JsonObject cvocEntry, JsonObject readObject, S
                 } catch (Exception e) {
                     logger.warning("External Vocabulary: " + termUri + " - Failed to find value for " + filterKey + ": "
                             + e.getMessage());
+                    e.printStackTrace();
                 }
             }
         }
@@ -628,6 +627,66 @@ private JsonObject filterResponse(JsonObject cvocEntry, JsonObject readObject, S
         }
     }
 
+    Object processPathSegment(int index, String[] pathParts, JsonValue curPath, String termUri) {
+        if (index < pathParts.length - 1) {
+            if (pathParts[index].contains("=")) {
+                JsonArray arr = ((JsonArray) curPath);
+                String[] keyVal = pathParts[index].split("=");
+                logger.fine("Looking for object where " + keyVal[0] + " is " + keyVal[1]);
+                String expected = keyVal[1];
+        
+                if (!expected.equals("*")) {
+                    if (expected.equals("@id")) {
+                        expected = termUri;
+                    }
+                    for (int k = 0; k < arr.size(); k++) {
+                        JsonObject jo = arr.getJsonObject(k);
+                        String val = jo.getString(keyVal[0]);
+                        if (val.equals(expected)) {
+                            logger.fine("Found: " + jo.toString());
+                            curPath = jo;
+                            return processPathSegment(index + 1, pathParts, curPath, termUri);
+                        }
+                    }
+                } else {
+                    JsonArrayBuilder parts = Json.createArrayBuilder();
+                    for (JsonValue subPath : arr) {
+                        if (subPath instanceof JsonObject) {
+                            JsonValue nextValue = ((JsonObject) subPath).get(keyVal[0]);
+                            Object obj = processPathSegment(index + 1, pathParts, nextValue, termUri);
+                            if (obj instanceof String) {
+                                parts.add((String) obj);
+                            } else {
+                                parts.add((JsonValue) obj);
+                            }
+                        }
+                    }
+                    return parts.build();
+                }
+                
+            } else {
+                curPath = ((JsonObject) curPath).get(pathParts[index]);
+                logger.fine("Found next Path object " + curPath.toString());
+                return processPathSegment(index + 1, pathParts, curPath, termUri);
+            }
+        } else {
+            logger.fine("Last segment: " + curPath.toString());
+            logger.fine("Looking for : " + pathParts[index]);
+            JsonValue jv = ((JsonObject) curPath).get(pathParts[index]);
+            ValueType type = jv.getValueType();
+            if (type.equals(JsonValue.ValueType.STRING)) {
+                return ((JsonString) jv).getString();
+            } else if (jv.getValueType().equals(JsonValue.ValueType.ARRAY)) {
+                return jv;
+            } else if (jv.getValueType().equals(JsonValue.ValueType.OBJECT)) {
+                return jv;
+            }
+        }
+
+        return null;
+
+    }
+   
     /**
      * Supports validation of externally controlled values. If the value is a URI it
      * must be in the namespace (start with) one of the uriSpace values of an
@@ -669,8 +728,20 @@ public boolean isValidCVocValue(DatasetFieldType dft, String value) {
     public List<String> getVocabScripts( Map<Long, JsonObject> cvocConf) {
         //ToDo - only return scripts that are needed (those fields are set on display pages, those blocks/fields are allowed in the Dataverse collection for create/edit)?
         Set<String> scripts = new HashSet<String>();
-        for(JsonObject jo: cvocConf.values()) {
-            scripts.add(jo.getString("js-url"));
+        for (JsonObject jo : cvocConf.values()) {
+            // Allow either a single script (a string) or an array of scripts (used, for
+            // example, to allow use of the common cvocutils.js script along with a main
+            // script for the field).
+            JsonValue scriptValue = jo.get("js-url");
+            ValueType scriptType = scriptValue.getValueType();
+            if (scriptType.equals(ValueType.STRING)) {
+                scripts.add(((JsonString) scriptValue).getString());
+            } else if (scriptType.equals(ValueType.ARRAY)) {
+                JsonArray scriptArray = ((JsonArray) scriptValue);
+                for (int i = 0; i < scriptArray.size(); i++) {
+                    scripts.add(scriptArray.getString(i));
+                }
+            }
         }
         String customScript = settingsService.getValueForKey(SettingsServiceBean.Key.ControlledVocabularyCustomJavaScript);
         if (customScript != null && !customScript.isEmpty()) {
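
getVocabScripts() above now accepts either a single script URL or an array of them under the "js-url" key. The following standalone sketch (not part of the patch; class name and example URLs are invented, except cvocutils.js which the comment above mentions) shows the same string-or-array handling with the jakarta.json API:

import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
import jakarta.json.JsonString;
import jakarta.json.JsonValue;
import java.util.LinkedHashSet;
import java.util.Set;

public class VocabScriptsSketch {
    // Mirrors the "js-url" handling: accept a single string or an array of strings.
    static Set<String> collectScripts(JsonObject cvocEntry) {
        Set<String> scripts = new LinkedHashSet<>();
        JsonValue scriptValue = cvocEntry.get("js-url");
        if (scriptValue.getValueType() == JsonValue.ValueType.STRING) {
            scripts.add(((JsonString) scriptValue).getString());
        } else if (scriptValue.getValueType() == JsonValue.ValueType.ARRAY) {
            JsonArray scriptArray = scriptValue.asJsonArray();
            for (int i = 0; i < scriptArray.size(); i++) {
                scripts.add(scriptArray.getString(i));
            }
        }
        return scripts;
    }

    public static void main(String[] args) {
        JsonObject single = Json.createObjectBuilder()
                .add("js-url", "https://example.org/people.js").build();
        JsonObject multiple = Json.createObjectBuilder()
                .add("js-url", Json.createArrayBuilder()
                        .add("https://example.org/cvocutils.js")
                        .add("https://example.org/people.js"))
                .build();
        System.out.println(collectScripts(single));   // [https://example.org/people.js]
        System.out.println(collectScripts(multiple)); // both URLs, in order
    }
}
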
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
index df126514308..824b486a42d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldType.java
@@ -13,8 +13,8 @@
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.MissingResourceException;
-import javax.faces.model.SelectItem;
-import javax.persistence.*;
+import jakarta.faces.model.SelectItem;
+import jakarta.persistence.*;
 
 /**
  * Defines the meaning and constraints of a metadata field and its values.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java
index 3ded24d7a59..6d3fda2812d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValidator.java
@@ -5,11 +5,11 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import java.util.Collections;
+
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java
index 2447a6478fd..1064187ccd6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValue.java
@@ -10,17 +10,17 @@
 import edu.harvard.iq.dataverse.util.MarkupChecker;
 import java.io.Serializable;
 import java.util.Comparator;
-import java.util.ResourceBundle;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
index 8b807f78bca..610bb70ff49 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldValueValidator.java
@@ -8,18 +8,16 @@
 import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
+import java.util.*;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
+import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.validation.EMailValidator;
 import edu.harvard.iq.dataverse.validation.URLValidator;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.validator.routines.UrlValidator;
 
 /**
  *
@@ -34,7 +32,6 @@ public void initialize(ValidateDatasetFieldType constraintAnnotation) {
     }
 
     public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext context) {
-
         context.disableDefaultConstraintViolation(); // we do this so we can have different messages depending on the different issue
 
         boolean lengthOnly = false;
@@ -55,11 +52,43 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte
             return true;
         }
 
+        // verify that individual fields contain no junk and that values are within range
+        if (dsfType.getName() != null && (dsfType.getName().equals(DatasetFieldConstant.northLatitude) || dsfType.getName().equals(DatasetFieldConstant.southLatitude) ||
+                dsfType.getName().equals(DatasetFieldConstant.westLongitude) || dsfType.getName().equals(DatasetFieldConstant.eastLongitude))) {
+            try {
+                verifyBoundingBoxCoordinatesWithinRange(dsfType.getName(), value.getValue());
+            } catch (IllegalArgumentException iae) {
+                try {
+                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidEntry")).addConstraintViolation();
+                } catch (NullPointerException e) {
+                }
+                return false;
+            }
+        }
+
+        // validate fields that are siblings and depend on each other's values
+        if (value.getDatasetField().getParentDatasetFieldCompoundValue() != null &&
+                value.getDatasetField().getParentDatasetFieldCompoundValue().getParentDatasetField().getValidationMessage() == null) {
+            Optional<String> failureMessage = validateChildConstraints(value.getDatasetField());
+            if (failureMessage.isPresent()) {
+                try {
+                    context.buildConstraintViolationWithTemplate(dsfType.getParentDatasetFieldType().getDisplayName() +  "  " +
+                            BundleUtil.getStringFromBundle(failureMessage.get()) ).addConstraintViolation();
+
+                    // save the failure message in the parent so we don't keep validating the children
+                    value.getDatasetField().getParentDatasetFieldCompoundValue().getParentDatasetField().setValidationMessage(failureMessage.get());
+
+                } catch (NullPointerException npe) {
+                }
+                return false;
+            }
+        }
+
         if (fieldType.equals(FieldType.TEXT) && !lengthOnly && value.getDatasetField().getDatasetFieldType().getValidationFormat() != null) {
             boolean valid = value.getValue().matches(value.getDatasetField().getDatasetFieldType().getValidationFormat());
             if (!valid) {
                 try {
-                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " is not a valid entry.").addConstraintViolation();
+                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidEntry")).addConstraintViolation();
                 } catch (NullPointerException e) {
                     return false;
                 }
@@ -128,7 +157,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte
             }
             if (!valid) {
                 try {
-                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " is not a valid date. \"" + YYYYformat + "\" is a supported format.").addConstraintViolation();
+                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() +  "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidDate")  ).addConstraintViolation();
                 } catch (NullPointerException npe) {
 
                 }
@@ -143,7 +172,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte
             } catch (Exception e) {
                 logger.fine("Float value failed validation: " + value.getValue() + " (" + dsfType.getDisplayName() + ")");
                 try {
-                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " is not a valid number.").addConstraintViolation();
+                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() +  "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidNumber") ).addConstraintViolation();
                 } catch (NullPointerException npe) {
 
                 }
@@ -157,7 +186,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte
                 Integer.parseInt(value.getValue());
             } catch (Exception e) {
                 try {
-                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " is not a valid integer.").addConstraintViolation();
+                    context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() +  "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidInteger")  ).addConstraintViolation();
                 } catch (NullPointerException npe) {
 
                 }
@@ -170,7 +199,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte
         if (fieldType.equals(FieldType.URL) && !lengthOnly) {
             boolean isValidUrl = URLValidator.isURLValid(value.getValue());
             if (!isValidUrl) {
-                context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " " + value.getValue() + "  {url.invalid}").addConstraintViolation();
+                context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " " + value.getValue() +  "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidURL")).addConstraintViolation();
                 return false;
             }
         }
@@ -178,7 +207,7 @@ public boolean isValid(DatasetFieldValue value, ConstraintValidatorContext conte
         if (fieldType.equals(FieldType.EMAIL) && !lengthOnly) {
             boolean isValidMail = EMailValidator.isEmailValid(value.getValue());
             if (!isValidMail) {
-                context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " " + value.getValue() + " {email.invalid}").addConstraintViolation();
+                context.buildConstraintViolationWithTemplate(dsfType.getDisplayName() + " " + value.getValue() +  "  " + BundleUtil.getStringFromBundle("dataset.metadata.invalidEmail")).addConstraintViolation();
                 return false;
             }
         }
@@ -216,4 +245,60 @@ public boolean isValidAuthorIdentifier(String userInput, Pattern pattern) {
         return pattern.matcher(userInput).matches();
     }
 
+    // Validate child fields against each other and return a failure message, or Optional.empty() on success
+    public Optional<String> validateChildConstraints(DatasetField dsf) {
+        final String fieldName = dsf.getDatasetFieldType().getName() != null ? dsf.getDatasetFieldType().getName() : "";
+        Optional<String> returnFailureMessage = Optional.empty();
+
+        // Validate Child Constraint for Geospatial Bounding Box
+        // validate the four points of the box to ensure proper layout
+        if (fieldName.equals(DatasetFieldConstant.northLatitude) || fieldName.equals(DatasetFieldConstant.westLongitude)
+                || fieldName.equals(DatasetFieldConstant.eastLongitude) || fieldName.equals(DatasetFieldConstant.southLatitude)) {
+            final String failureMessage = "dataset.metadata.invalidGeospatialCoordinates";
+
+            try {
+                final Map<String, String> coords = new HashMap<>();
+                dsf.getParentDatasetFieldCompoundValue().getChildDatasetFields().forEach(f -> {
+                        coords.put(f.getDatasetFieldType().getName(), f.getValue());
+                });
+                if (!validateBoundingBox(coords.get(DatasetFieldConstant.westLongitude),
+                        coords.get(DatasetFieldConstant.eastLongitude),
+                        coords.get(DatasetFieldConstant.northLatitude),
+                        coords.get(DatasetFieldConstant.southLatitude))) {
+                    returnFailureMessage = Optional.of(failureMessage);
+                }
+            } catch (IllegalArgumentException e) { // IllegalArgumentException NumberFormatException
+                returnFailureMessage = Optional.of(failureMessage);
+            }
+        }
+
+        return returnFailureMessage;
+    }
+
+    public static boolean validateBoundingBox(final String westLon, final String eastLon, final String northLat, final String southLat) {
+        boolean returnVal = false;
+
+        try {
+            Float west = verifyBoundingBoxCoordinatesWithinRange(DatasetFieldConstant.westLongitude, westLon);
+            Float east = verifyBoundingBoxCoordinatesWithinRange(DatasetFieldConstant.eastLongitude, eastLon);
+            Float north = verifyBoundingBoxCoordinatesWithinRange(DatasetFieldConstant.northLatitude, northLat);
+            Float south = verifyBoundingBoxCoordinatesWithinRange(DatasetFieldConstant.southLatitude, southLat);
+            returnVal = west <= east && south <= north;
+        } catch (IllegalArgumentException e) {
+            returnVal = false;
+        }
+
+        return returnVal;
+    }
+
+    private static Float verifyBoundingBoxCoordinatesWithinRange(final String name, final String value) throws IllegalArgumentException {
+        int max = name.equals(DatasetFieldConstant.westLongitude) || name.equals(DatasetFieldConstant.eastLongitude) ? 180 : 90;
+        int min = max * -1;
+
+        final Float returnVal = value != null ? Float.parseFloat(value) : Float.NaN;
+        if (returnVal.isNaN() || returnVal < min || returnVal > max) {
+            throw new IllegalArgumentException(String.format("Value (%s) not in range (%s-%s)", returnVal.isNaN() ? "missing" : returnVal, min, max));
+        }
+        return returnVal;
+    }
 }
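
The geospatial checks added above enforce two rules: each coordinate must parse as a number within range (longitudes within ±180, latitudes within ±90), and the box must be laid out with west <= east and south <= north. A standalone sketch of that rule (not the class above; simplified names and signatures):

public class BoundingBoxSketch {
    // Parse a coordinate and verify it lies within [-max, max]; throws on junk or out-of-range values.
    static float inRange(String value, int max) {
        float v = Float.parseFloat(value); // NumberFormatException (an IllegalArgumentException) on junk
        if (Float.isNaN(v) || v < -max || v > max) {
            throw new IllegalArgumentException("Value (" + value + ") not in range (-" + max + " to " + max + ")");
        }
        return v;
    }

    static boolean validateBoundingBox(String westLon, String eastLon, String northLat, String southLat) {
        try {
            return inRange(westLon, 180) <= inRange(eastLon, 180)
                    && inRange(southLat, 90) <= inRange(northLat, 90);
        } catch (RuntimeException e) { // NumberFormatException, NullPointerException, IllegalArgumentException
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(validateBoundingBox("-10", "10", "45", "-45"));  // true
        System.out.println(validateBoundingBox("10", "-10", "45", "-45"));  // false: west > east
        System.out.println(validateBoundingBox("-10", "10", "95", "-45"));  // false: latitude out of range
        System.out.println(validateBoundingBox("-10", "10", "45", "junk")); // false: not a number
    }
}
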
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java
index 8f8e9b103c1..dec07a09643 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingDataverse.java
@@ -2,19 +2,19 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java
index 3789efcd443..39c82bfa3f1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLinkingServiceBean.java
@@ -8,12 +8,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -63,7 +64,7 @@ public DatasetLinkingDataverse findDatasetLinkingDataverse(Long datasetId, Long
                 .setParameter("datasetId", datasetId)
                 .setParameter("linkingDataverseId", linkingDataverseId)
                 .getSingleResult();            
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             logger.fine("no datasetLinkingDataverse found for datasetId " + datasetId + " and linkingDataverseId " + linkingDataverseId);        
             return null;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
index 7b857545c20..cc0078ecbc5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetLock.java
@@ -20,25 +20,24 @@
 
 package edu.harvard.iq.dataverse;
 
-import static edu.harvard.iq.dataverse.DatasetLock.Reason.Workflow;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.util.Date;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 429a0d7a4e4..b79f387f20b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -11,6 +11,9 @@
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.dataaccess.SwiftAccessIO;
 import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
@@ -33,13 +36,14 @@
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.ingest.IngestRequest;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.metadataimport.ForeignMetadataImportServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
@@ -48,6 +52,7 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.ArchiverUtil;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.DataFileComparator;
 import edu.harvard.iq.dataverse.util.FileSortFieldAndOrder;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
@@ -56,7 +61,6 @@
 
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.WebloaderUtil;
 import edu.harvard.iq.dataverse.validation.URLValidator;
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
@@ -80,27 +84,29 @@
 import java.util.Set;
 import java.util.Collection;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.event.ValueChangeEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import java.util.stream.Collectors;
+
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.event.ValueChangeEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.primefaces.event.FileUploadEvent;
 import org.primefaces.model.file.UploadedFile;
 
-import javax.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolation;
 import org.apache.commons.httpclient.HttpClient;
 //import org.primefaces.context.RequestContext;
 import java.util.Arrays;
 import java.util.HashSet;
-import javax.faces.model.SelectItem;
-import javax.faces.validator.ValidatorException;
+import jakarta.faces.model.SelectItem;
+import jakarta.faces.validator.ValidatorException;
 
 import java.util.logging.Level;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
@@ -121,12 +127,12 @@
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
 import java.util.Collections;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
 
-import javax.faces.event.AjaxBehaviorEvent;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.mutable.MutableBoolean;
@@ -143,6 +149,8 @@
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
 import edu.harvard.iq.dataverse.search.SearchUtil;
 import edu.harvard.iq.dataverse.search.SolrClientService;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.SignpostingResources;
 import edu.harvard.iq.dataverse.util.FileMetadataUtil;
 import java.util.Comparator;
 import org.apache.solr.client.solrj.SolrQuery;
@@ -230,6 +238,8 @@ public enum DisplayMode {
     ExternalToolServiceBean externalToolService;
     @EJB
     SolrClientService solrClientService;
+    @EJB
+    DvObjectServiceBean dvObjectService;
     @Inject
     DataverseRequestServiceBean dvRequestService;
     @Inject
@@ -338,7 +348,7 @@ public void setSelectedHostDataverse(Dataverse selectedHostDataverse) {
 
     private Boolean hasRsyncScript = false;
 
-    private Boolean hasTabular = false;
+    /*private Boolean hasTabular = false;*/
 
 
     /**
@@ -347,7 +357,15 @@ public void setSelectedHostDataverse(Dataverse selectedHostDataverse) {
      * sometimes you want to know about the current version ("no tabular files
      * currently"). Like all files, tabular files can be deleted.
      */
+    /**
+     * There doesn't seem to be an actual real-life case where we need to know
+     * whether this dataset "has ever had a tabular file" - for all practical purposes
+     * only versionHasTabular appears to be in use, so the other boolean is being
+     * removed.
+     */
     private boolean versionHasTabular = false;
+    private boolean versionHasGlobus = false;
+    private boolean globusTransferRequested = false;
 
     private boolean showIngestSuccess;
     
@@ -363,6 +381,19 @@ public void setShowIngestSuccess(boolean showIngestSuccess) {
         this.showIngestSuccess = showIngestSuccess;
     }
 
+    private String termsGuestbookPopupAction = "";
+
+    public void setTermsGuestbookPopupAction(String popupAction){
+        if(popupAction != null && popupAction.length() > 0){
+            this.termsGuestbookPopupAction = popupAction;
+        }
+
+    }
+
+    public String getTermsGuestbookPopupAction(){
+        return termsGuestbookPopupAction;
+    }
+    
     // TODO: Consider renaming "configureTools" to "fileConfigureTools".
     List<ExternalTool> configureTools = new ArrayList<>();
     // TODO: Consider renaming "exploreTools" to "fileExploreTools".
@@ -375,7 +406,12 @@ public void setShowIngestSuccess(boolean showIngestSuccess) {
     Map<Long, List<ExternalTool>> previewToolsByFileId = new HashMap<>();
     // TODO: Consider renaming "previewTools" to "filePreviewTools".
     List<ExternalTool> previewTools = new ArrayList<>();
+    Map<Long, List<ExternalTool>> fileQueryToolsByFileId = new HashMap<>();
+    List<ExternalTool> fileQueryTools = new ArrayList<>();
     private List<ExternalTool> datasetExploreTools;
+    private List<ExternalTool> datasetConfigureTools;
+    // The selected dataset-level configure tool
+    private ExternalTool datasetConfigureTool;
 
     public Boolean isHasRsyncScript() {
         return hasRsyncScript;
@@ -475,7 +511,7 @@ public String getThumbnailString() {
 
             thumbnailString = datasetThumbnail.getBase64image();
         } else {
-            thumbnailString = thumbnailServiceWrapper.getDatasetCardImageAsBase64Url(dataset,
+            thumbnailString = thumbnailServiceWrapper.getDatasetCardImageAsUrl(dataset,
                     workingVersion.getId(),
                     !workingVersion.isDraft(),
                     ImageThumbConverter.DEFAULT_DATASETLOGO_SIZE);
@@ -503,6 +539,16 @@ public void setRemoveUnusedTags(boolean removeUnusedTags) {
 
     private String fileSortField;
     private String fileSortOrder;
+    private boolean tagPresort = true;
+    private boolean folderPresort = true;
+    // Due to what may be a bug in PrimeFaces, the call to select a new page of
+    // files appears to reset the two presort booleans to false. The flag and
+    // duplicate booleans below remember the most recent values so that they are
+    // applied only on real checkbox changes. See further comments where these
+    // are used.
+    boolean isPageFlip = false;
+    private boolean newTagPresort = true;
+    private boolean newFolderPresort = true;
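+    // Editorial sketch (not part of this change) of the two event orderings these flags
+    // are meant to distinguish, based on the comments in fileListingPaginatorListener()
+    // and sort() further down in this class:
+    //
+    //   page flip:      fileListingPaginatorListener() sets isPageFlip = true; the presort
+    //                   setters then fire spuriously (apparently with "false") and only
+    //                   record newTagPresort / newFolderPresort.
+    //
+    //   checkbox click: the presort setters record the new values; the sort() listener
+    //                   then clears isPageFlip and applies any values that changed.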
 
     public List<Entry<String,String>> getCartList() {
         if (session.getUser() instanceof AuthenticatedUser) {
@@ -665,70 +711,46 @@ public void showAll(){
     }
 
     private List<FileMetadata> selectFileMetadatasForDisplay() {
-        Set<Long> searchResultsIdSet = null;
-
-        if (isIndexedVersion()) {
+        final Set<Long> searchResultsIdSet;
+        if (isIndexedVersion() && StringUtil.isEmpty(fileLabelSearchTerm) && StringUtil.isEmpty(fileTypeFacet) && StringUtil.isEmpty(fileAccessFacet) && StringUtil.isEmpty(fileTagsFacet)) {
+            // Indexed version: we need the facets, and they are set as a side effect of
+            // the getFileIdsInVersionFromSolr method. But since no search terms were
+            // specified, we will return the full list of files in the version and
+            // discard the result from getFileIdsInVersionFromSolr.
+            getFileIdsInVersionFromSolr(workingVersion.getId(), this.fileLabelSearchTerm);
+            // Since the search results would include the full set of fmds when all the
+            // terms/facets are empty, setting searchResultsIdSet to null is just an
+            // optimization that skips the filtering below.
+            searchResultsIdSet = null;
+        } else if (isIndexedVersion()) {
             // We run the search even if no search term and/or facets are
             // specified - to generate the facet labels list:
             searchResultsIdSet = getFileIdsInVersionFromSolr(workingVersion.getId(), this.fileLabelSearchTerm);
-            // But, if no search terms were specified, we can immediately return the full
-            // list of the files in the version:
-            if (StringUtil.isEmpty(fileLabelSearchTerm)
-                    && StringUtil.isEmpty(fileTypeFacet)
-                    && StringUtil.isEmpty(fileAccessFacet)
-                    && StringUtil.isEmpty(fileTagsFacet)) {
-                if ((StringUtil.isEmpty(fileSortField) || fileSortField.equals("name")) && StringUtil.isEmpty(fileSortOrder)) {
-                    return workingVersion.getFileMetadatasSorted();
-                } else {
-                    searchResultsIdSet = null;
-                }
-            }
-
-        } else {
+        } else if (!StringUtil.isEmpty(this.fileLabelSearchTerm)) {
             // No, this is not an indexed version.
             // If the search term was specified, we'll run a search in the db;
             // if not - return the full list of files in the version.
             // (no facets without solr!)
-            if (StringUtil.isEmpty(this.fileLabelSearchTerm)) {
-                if ((StringUtil.isEmpty(fileSortField) || fileSortField.equals("name")) && StringUtil.isEmpty(fileSortOrder)) {
-                    return workingVersion.getFileMetadatasSorted();
-                }
-            } else {
-                searchResultsIdSet = getFileIdsInVersionFromDb(workingVersion.getId(), this.fileLabelSearchTerm);
-            }
-        }
-
-        List<FileMetadata> retList = new ArrayList<>();
-
-        for (FileMetadata fileMetadata : workingVersion.getFileMetadatasSorted()) {
-            if (searchResultsIdSet == null || searchResultsIdSet.contains(fileMetadata.getDataFile().getId())) {
-                retList.add(fileMetadata);
-            }
+            searchResultsIdSet = getFileIdsInVersionFromDb(workingVersion.getId(), this.fileLabelSearchTerm);
+        } else {
+            searchResultsIdSet = null;
         }
 
-        if ((StringUtil.isEmpty(fileSortOrder) && !("name".equals(fileSortField)))
-                || ("desc".equals(fileSortOrder) || !("name".equals(fileSortField)))) {
-            sortFileMetadatas(retList);
-
+        final List<FileMetadata> md = workingVersion.getFileMetadatas();
+        final List<FileMetadata> retList;
+        if (searchResultsIdSet == null) {
+            retList = new ArrayList<>(md);
+        } else {
+            retList = md.stream().filter(x -> searchResultsIdSet.contains(x.getDataFile().getId())).collect(Collectors.toList());
         }
-
+        sortFileMetadatas(retList);
         return retList;
     }
 
-    private void sortFileMetadatas(List<FileMetadata> fileList) {
-        if ("name".equals(fileSortField) && "desc".equals(fileSortOrder)) {
-            Collections.sort(fileList, compareByLabelZtoA);
-        } else if ("date".equals(fileSortField)) {
-            if ("desc".equals(fileSortOrder)) {
-                Collections.sort(fileList, compareByOldest);
-            } else {
-                Collections.sort(fileList, compareByNewest);
-            }
-        } else if ("type".equals(fileSortField)) {
-            Collections.sort(fileList, compareByType);
-        } else if ("size".equals(fileSortField)) {
-            Collections.sort(fileList, compareBySize);
-        }
+    private void sortFileMetadatas(final List<FileMetadata> fileList) {
+        
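+        // Editorial note: the DataFileComparator.compareBy contract is assumed here (see
+        // that class for the authoritative behavior): folders first when folderPresort is
+        // set, files grouped by the configured category order when tagPresort is set, and
+        // finally ordered by fileSortField, ascending unless fileSortOrder is "desc".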
+        final DataFileComparator dfc = new DataFileComparator();
+        final Comparator<FileMetadata> comp = dfc.compareBy(folderPresort, tagPresort, fileSortField, !"desc".equals(fileSortOrder));
+        Collections.sort(fileList, comp);
     }
 
     private Boolean isIndexedVersion = null;
@@ -737,17 +759,29 @@ public boolean isIndexedVersion() {
         if (isIndexedVersion != null) {
             return isIndexedVersion;
         }
+        
+        // Just like on the collection page, facets on the Dataset page can be
+        // disabled instance-wide by an admin:
+        if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacets, false)) {
+            return isIndexedVersion = false;
+        }
+        
         // The version is SUPPOSED to be indexed if it's the latest published version, or a
-        // draft. So if none of the above is true, we return false right away:
-
+        // draft. So if none of the above is true, we can return false right away. 
         if (!(workingVersion.isDraft() || isThisLatestReleasedVersion())) {
             return isIndexedVersion = false;
         }
-
-        // ... but if it is the latest published version or a draft, we want to test
-        // and confirm that this version *has* actually been indexed and is searchable
-        // (and that solr is actually up and running!), by running a quick solr search:
-        return isIndexedVersion = isThisVersionSearchable();
+        // If this is the latest published version, we want to confirm that this 
+        // version was successfully indexed after the last publication 
+        
+        if (isThisLatestReleasedVersion()) {
+            return isIndexedVersion = (workingVersion.getDataset().getIndexTime() != null)
+                    && workingVersion.getDataset().getIndexTime().after(workingVersion.getReleaseTime());
+        }
+        
+        // Drafts don't have their index timestamps set/incremented when indexed,
+        // so we'll just assume the draft is indexed and hope for the best.
+        return isIndexedVersion = true;
     }
 
     /**
@@ -803,8 +837,18 @@ public List<FacetLabel> getFileTagsFacetLabels() {
     /**
      * Verifies that solr is running and that the version is indexed and searchable
      * @return boolean
-     */
+     * Commenting out this method for now, since we have decided it was not
+     * necessary to query solr just to figure out if we can query solr. We will
+     * rely solely on the latest-released status and the indexed timestamp from
+     * the database for that. - L.A.
+     *
     public boolean isThisVersionSearchable() {
+        // Just like on the collection page, facets on the Dataset page can be
+        // disabled instance-wide by an admin:
+        if (settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacets, false)) {
+            return false;
+        }
+        
         SolrQuery solrQuery = new SolrQuery();
 
         solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, workingVersion.getDataset().getId().toString()));
@@ -839,6 +883,7 @@ public boolean isThisVersionSearchable() {
 
         return false;
     }
+    */
 
     /**
      * Finds the list of numeric datafile ids in the Version specified, by running
@@ -950,10 +995,19 @@ public Set<Long> getFileIdsInVersionFromSolr(Long datasetVersionId, String patte
             logger.fine("Remote Solr Exception: " + ex.getLocalizedMessage());
             String msg = ex.getLocalizedMessage();
             if (msg.contains(SearchFields.FILE_DELETED)) {
+                // This is a backward compatibility hook put in place many versions
+                // ago, to accommodate instances running Solr with schemas that 
+                // don't include this flag yet. Running Solr with an up-to-date
+                // schema has been a hard requirement for a while now; should we 
+                // remove it at this point? - L.A. 
                 fileDeletedFlagNotIndexed = true;
+            } else {
+                isIndexedVersion = false;
+                return resultIds;
             }
         } catch (Exception ex) {
             logger.warning("Solr exception: " + ex.getLocalizedMessage());
+            isIndexedVersion = false; 
             return resultIds;
         }
 
@@ -966,6 +1020,7 @@ public Set<Long> getFileIdsInVersionFromSolr(Long datasetVersionId, String patte
                 queryResponse = solrClientService.getSolrClient().query(solrQuery);
             } catch (Exception ex) {
                 logger.warning("Caught a Solr exception (again!): " + ex.getLocalizedMessage());
+                isIndexedVersion = false; 
                 return resultIds;
             }
         }
@@ -1851,6 +1906,17 @@ public boolean webloaderUploadSupported() {
         return settingsWrapper.isWebloaderUpload() && StorageIO.isDirectUploadEnabled(dataset.getEffectiveStorageDriverId());
     }
 
+    private void setIdByPersistentId() {
+        GlobalId gid = PidUtil.parseAsGlobalID(persistentId);
+        Long id = dvObjectService.findIdByGlobalId(gid, DvObject.DType.Dataset);
+        if (id == null) {
+            id = dvObjectService.findIdByAltGlobalId(gid, DvObject.DType.Dataset);
+        }
+        if (id != null) {
+            this.setId(id);
+        }
+    }
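+    // Editorial note on the method above: it resolves the persistent identifier into the
+    // numeric dataset id (falling back to an alternative/legacy global id), so that init()
+    // below can fall through to the single findDeep(versionId) lookup path.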
+
     private String init(boolean initFull) {
 
         //System.out.println("_YE_OLDE_QUERY_COUNTER_");  // for debug purposes
@@ -1861,7 +1927,12 @@ private String init(boolean initFull) {
         String nonNullDefaultIfKeyNotFound = "";
         protocol = settingsWrapper.getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
         authority = settingsWrapper.getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
-        if (this.getId() != null || versionId != null || persistentId != null) { // view mode for a dataset
+        String sortOrder = getSortOrder();
+        if(sortOrder != null) {
+            FileMetadata.setCategorySortOrder(sortOrder);
+        }
+        
+        if (dataset.getId() != null || versionId != null || persistentId != null) { // view mode for a dataset     
 
             DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null;
 
@@ -1869,44 +1940,60 @@ private String init(boolean initFull) {
             // Set the workingVersion and Dataset
             // ---------------------------------------
             if (persistentId != null) {
-                logger.fine("initializing DatasetPage with persistent ID " + persistentId);
-                // Set Working Version and Dataset by PersistentID
-                dataset = datasetService.findByGlobalId(persistentId);
-                if (dataset == null) {
-                    logger.warning("No such dataset: "+persistentId);
-                    return permissionsWrapper.notFound();
-                }
-                logger.fine("retrieved dataset, id="+dataset.getId());
-
-                retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
-                //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, version);
-                this.workingVersion = retrieveDatasetVersionResponse.getDatasetVersion();
-                logger.fine("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState());
-
-            } else if (this.getId() != null) {
+                setIdByPersistentId();
+            }
+            
+            if (this.getId() != null) {
                 // Set Working Version and Dataset by Datasaet Id and Version
+                
+                // We are only performing these lookups to obtain the database id
+                // of the version that we are displaying, and then we will use it
+                // to perform a .findDeep(versionId); see below. 
+                
+                // TODO: replace the code block below (the combination of
+                // datasetService.find(id) and datasetVersionService.selectRequestedVersion())
+                // with some optimized, direct query-based way of obtaining
+                // the numeric id of the requested DatasetVersion - that's
+                // all we need; we are not using any of the entities produced
+                // below.
+                
                 dataset = datasetService.find(this.getId());
+                
                 if (dataset == null) {
                     logger.warning("No such dataset: "+dataset);
                     return permissionsWrapper.notFound();
                 }
                 //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionById(dataset.getId(), version);
                 retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
+                if (retrieveDatasetVersionResponse == null) {
+                    return permissionsWrapper.notFound();
+                }
                 this.workingVersion = retrieveDatasetVersionResponse.getDatasetVersion();
-                logger.info("retreived version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState());
-
-            } else if (versionId != null) {
-                // TODO: 4.2.1 - this method is broken as of now!
-                // Set Working Version and Dataset by DatasaetVersion Id
-                //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByVersionId(versionId);
-
+                logger.fine("retrieved version: id: " + workingVersion.getId() + ", state: " + this.workingVersion.getVersionState());
+                
+                versionId = workingVersion.getId();
+
+                this.workingVersion = null;
+                this.dataset = null;
+
+            } 
+            
+            // ... And now the "real" working version lookup: 
+            
+            if (versionId != null) {
+                this.workingVersion = datasetVersionService.findDeep(versionId);
+                dataset = workingVersion.getDataset();
+            }
+            
+            if (workingVersion == null) {
+                logger.warning("Failed to retrieve version");
+                return permissionsWrapper.notFound();
             }
+            
             this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
 
 
-            if (retrieveDatasetVersionResponse == null) {
-                return permissionsWrapper.notFound();
-            }
+            
 
             switch (selectTab){
                 case "dataFilesTab":
@@ -1923,16 +2010,6 @@ private String init(boolean initFull) {
                     break;
             }
 
-            //this.dataset = this.workingVersion.getDataset();
-
-            // end: Set the workingVersion and Dataset
-            // ---------------------------------------
-            // Is the DatasetVersion or Dataset null?
-            //
-            if (workingVersion == null || this.dataset == null) {
-                return permissionsWrapper.notFound();
-            }
-
             // Is the Dataset harvested?
 
             if (dataset.isHarvested()) {
@@ -1960,7 +2037,7 @@ private String init(boolean initFull) {
                 return permissionsWrapper.notAuthorized();
             }
 
-            if (!retrieveDatasetVersionResponse.wasRequestedVersionRetrieved()) {
+            if (retrieveDatasetVersionResponse != null && !retrieveDatasetVersionResponse.wasRequestedVersionRetrieved()) {
                 //msg("checkit " + retrieveDatasetVersionResponse.getDifferentVersionMessage());
                 JsfHelper.addWarningMessage(retrieveDatasetVersionResponse.getDifferentVersionMessage());//BundleUtil.getStringFromBundle("dataset.message.metadataSuccess"));
             }
@@ -1981,11 +2058,6 @@ private String init(boolean initFull) {
                 // init the list of FileMetadatas
                 if (workingVersion.isDraft() && canUpdateDataset()) {
                     readOnly = false;
-                } else {
-                    // an attempt to retreive both the filemetadatas and datafiles early on, so that
-                    // we don't have to do so later (possibly, many more times than necessary):
-                    AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null;
-                    datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au);
                 }
                 // This will default to all the files in the version, if the search term
                 // parameter hasn't been specified yet:
@@ -2049,7 +2121,7 @@ private String init(boolean initFull) {
             if ( isEmpty(dataset.getIdentifier()) && systemConfig.directUploadEnabled(dataset) ) {
             	CommandContext ctxt = commandEngine.getContext();
             	GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
-                dataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(dataset, idServiceBean));
+                dataset.setIdentifier(idServiceBean.generateDatasetIdentifier(dataset));
             }
             dataverseTemplates.addAll(dataverseService.find(ownerId).getTemplates());
             if (!dataverseService.find(ownerId).isTemplateRoot()) {
@@ -2112,23 +2184,27 @@ private String init(boolean initFull) {
         displayLockInfo(dataset);
         displayPublishMessage();
 
+        // TODO: replace this loop, and the loop in the method that calculates 
+        // the total "originals" size of the dataset with direct custom queries; 
+        // then we'll be able to drop the lookup hint for DataTable from the 
+        // findDeep() method for the version and further speed up the lookup 
+        // a little bit.
+        boolean globusDownloadEnabled = systemConfig.isGlobusDownload();
         for (FileMetadata fmd : workingVersion.getFileMetadatas()) {
-            if (fmd.getDataFile().isTabularData()) {
+            DataFile df = fmd.getDataFile();
+            if (df.isTabularData()) {
                 versionHasTabular = true;
-                break;
             }
-        }
-        for(DataFile f : dataset.getFiles()) {
-            // TODO: Consider uncommenting this optimization.
-//            if (versionHasTabular) {
-//                hasTabular = true;
-//                break;
-//            }
-            if(f.isTabularData()) {
-                hasTabular = true;
+            if(globusDownloadEnabled) {
+                if(GlobusAccessibleStore.isGlobusAccessible(DataAccess.getStorageDriverFromIdentifier(df.getStorageIdentifier()))) {
+                    versionHasGlobus= true;
+                }
+            }
+            if(versionHasTabular &&(!globusDownloadEnabled || versionHasGlobus)) {
                 break;
             }
         }
+        
         //Show ingest success message if refresh forces a page reload after ingest success
         //This is needed to display the explore buttons (the fileDownloadHelper needs to be reloaded via page
         if (showIngestSuccess) {
@@ -2138,7 +2214,9 @@ private String init(boolean initFull) {
         configureTools = externalToolService.findFileToolsByType(ExternalTool.Type.CONFIGURE);
         exploreTools = externalToolService.findFileToolsByType(ExternalTool.Type.EXPLORE);
         previewTools = externalToolService.findFileToolsByType(ExternalTool.Type.PREVIEW);
+        fileQueryTools = externalToolService.findFileToolsByType(ExternalTool.Type.QUERY);
         datasetExploreTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.EXPLORE);
+        datasetConfigureTools = externalToolService.findDatasetToolsByType(ExternalTool.Type.CONFIGURE);
         rowsPerPage = 10;
         if (dataset.getId() != null && canUpdateDataset()) {
             hasRestrictedFiles = workingVersion.isHasRestrictedFile();
@@ -2171,10 +2249,29 @@ private void displayPublishMessage(){
         if (workingVersion.isDraft() && workingVersion.getId() != null && canUpdateDataset() 
                 && !dataset.isLockedFor(DatasetLock.Reason.finalizePublication)
               &&   (canPublishDataset() || !dataset.isLockedFor(DatasetLock.Reason.InReview) )){
-            JsfHelper.addWarningMessage(datasetService.getReminderString(dataset, canPublishDataset()));
+            JsfHelper.addWarningMessage(datasetService.getReminderString(dataset, canPublishDataset(), false, isValid()));
         }               
     }
 
+    Boolean valid = null;
+
+    public boolean isValid() {
+        if (valid == null) {
+            DatasetVersion version = dataset.getLatestVersion();
+            if (!version.isDraft()) {
+                valid = true;
+            } else {
+                DatasetVersion newVersion = version.cloneDatasetVersion();
+                newVersion.setDatasetFields(newVersion.initDatasetFields());
+                valid = newVersion.isValid();
+            }
+        }
+        return valid;
+    }
+
+    public boolean isValidOrCanReviewIncomplete() {
+        return isValid() || JvmSettings.UI_ALLOW_REVIEW_INCOMPLETE.lookupOptional(Boolean.class).orElse(false);
+    }
+
     private void displayLockInfo(Dataset dataset) {
         // Various info messages, when the dataset is locked (for various reasons):
         if (dataset.isLocked() && canUpdateDataset()) {
@@ -2243,6 +2340,19 @@ private void displayLockInfo(Dataset dataset) {
 
     }
 
+    public String getSortOrder() {
+        return settingsWrapper.getValueForKey(SettingsServiceBean.Key.CategoryOrder, null);
+    }
+    
+    public boolean orderByFolder() {
+        return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.OrderByFolder, true);
+    }
+    
+    public boolean allowUserManagementOfOrder() {
+        return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.AllowUserManagementOfOrder, false);
+    }
+
+
     private Boolean fileTreeViewRequired = null;
 
     public boolean isFileTreeViewRequired() {
@@ -2265,6 +2375,7 @@ public String getFileDisplayMode() {
     }
 
     public void setFileDisplayMode(String fileDisplayMode) {
+        isPageFlip = true;
         if ("Table".equals(fileDisplayMode)) {
             this.fileDisplayMode = FileDisplayStyle.TABLE;
         } else {
@@ -2276,13 +2387,6 @@ public boolean isFileDisplayTable() {
         return fileDisplayMode == FileDisplayStyle.TABLE;
     }
 
-    public void toggleFileDisplayMode() {
-        if (fileDisplayMode == FileDisplayStyle.TABLE) {
-            fileDisplayMode = FileDisplayStyle.TREE;
-        } else {
-            fileDisplayMode = FileDisplayStyle.TABLE;
-        }
-    }
     public boolean isFileDisplayTree() {
         return fileDisplayMode == FileDisplayStyle.TREE;
     }
@@ -2386,13 +2490,17 @@ private DefaultTreeNode createFileTreeNode(FileMetadata fileMetadata, TreeNode p
         return fileNode;
     }
 
-    public boolean isHasTabular() {
+    /*public boolean isHasTabular() {
         return hasTabular;
-    }
+    }*/
 
     public boolean isVersionHasTabular() {
         return versionHasTabular;
     }
+    
+    public boolean isVersionHasGlobus() {
+        return versionHasGlobus;
+    }
 
     public boolean isReadOnly() {
         return readOnly;
@@ -2802,54 +2910,57 @@ public void refresh(ActionEvent e) {
         refresh();
     }
 
+
+    public void sort() {
+        // This is called as the presort checkboxes' listener when the user is actually
+        // clicking in the checkbox. It does appear to happen after the setTagPresort
+        // and setFolderPresort calls.
+        // So we know this isn't a page flip, and at this point we can update to use the
+        // new values.
+        isPageFlip = false;
+        if (newTagPresort != tagPresort) {
+            tagPresort = newTagPresort;
+        }
+        if (newFolderPresort != folderPresort) {
+            folderPresort = newFolderPresort;
+        }
+        sortFileMetadatas(fileMetadatasSearch);
+        JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.results.presort.change.success"));
+    }
+
     public String refresh() {
         logger.fine("refreshing");
 
+        //In v5.14, versionId was null here. In 6.0, it appears not to be.
+        //This check handles the null in case it reappears or occurs under other circumstances.
+        if(versionId==null) {
+            logger.warning("versionId was null in refresh");
+            versionId = workingVersion.getId();
+        }
         //dataset = datasetService.find(dataset.getId());
         dataset = null;
+        workingVersion = null; 
 
         logger.fine("refreshing working version");
 
         DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null;
 
-        if (persistentId != null) {
-            //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, version);
-            dataset = datasetService.findByGlobalId(persistentId);
-            retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
-        } else if (versionId != null) {
-            retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionByVersionId(versionId);
-        } else if (dataset.getId() != null) {
-            //retrieveDatasetVersionResponse = datasetVersionService.retrieveDatasetVersionById(dataset.getId(), version);
-            dataset = datasetService.find(dataset.getId());
-            retrieveDatasetVersionResponse = datasetVersionService.selectRequestedVersion(dataset.getVersions(), version);
-        }
+        if (versionId != null) {
+            // versionId must have been set by now - either in the init() method or by the
+            // null check above - regardless of how the page was originally called: by the
+            // dataset database id, by the persistent identifier, or by the db id of the version.
+            this.workingVersion = datasetVersionService.findDeep(versionId);
+            dataset = workingVersion.getDataset();
+        } 
+        
 
-        if (retrieveDatasetVersionResponse == null) {
+        if (this.workingVersion == null) {
             // TODO:
             // should probably redirect to the 404 page, if we can't find
             // this version anymore.
             // -- L.A. 4.2.3
             return "";
         }
-        this.workingVersion = retrieveDatasetVersionResponse.getDatasetVersion();
-
-        if (this.workingVersion == null) {
-            // TODO:
-            // same as the above
-
-            return "";
-        }
-
-        if (dataset == null) {
-            // this would be the case if we were retrieving the version by
-            // the versionId, above.
-            this.dataset = this.workingVersion.getDataset();
-        }
-
-        if (readOnly) {
-            AuthenticatedUser au = session.getUser() instanceof AuthenticatedUser ? (AuthenticatedUser) session.getUser() : null;
-            datafileService.findFileMetadataOptimizedExperimental(dataset, workingVersion, au);
-        }
 
         fileMetadatasSearch = selectFileMetadatasForDisplay();
 
@@ -2862,9 +2973,9 @@ public String refresh() {
             //SEK 12/20/2019 - since we are ingesting a file we know that there is a current draft version
             lockedDueToIngestVar = null;
             if (canViewUnpublishedDataset()) {
-                return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&showIngestSuccess=true&version=DRAFT&faces-redirect=true";
+                return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&showIngestSuccess=true&version=DRAFT&faces-redirect=true";
             } else {
-                return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&showIngestSuccess=true&faces-redirect=true";
+                return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&showIngestSuccess=true&faces-redirect=true";
             }
         }
 
@@ -2996,6 +3107,26 @@ public void setSelectedNonDownloadableFiles(List<FileMetadata> selectedNonDownlo
         this.selectedNonDownloadableFiles = selectedNonDownloadableFiles;
     }
 
+    private List<FileMetadata> selectedGlobusTransferableFiles;
+
+    public List<FileMetadata> getSelectedGlobusTransferableFiles() {
+        return selectedGlobusTransferableFiles;
+    }
+
+    public void setSelectedGlobusTransferableFiles(List<FileMetadata> selectedGlobusTransferableFiles) {
+        this.selectedGlobusTransferableFiles = selectedGlobusTransferableFiles;
+    }
+    
+    private List<FileMetadata> selectedNonGlobusTransferableFiles;
+
+    public List<FileMetadata> getSelectedNonGlobusTransferableFiles() {
+        return selectedNonGlobusTransferableFiles;
+    }
+
+    public void setSelectedNonGlobusTransferableFiles(List<FileMetadata> selectedNonGlobusTransferableFiles) {
+        this.selectedNonGlobusTransferableFiles = selectedNonGlobusTransferableFiles;
+    }
+    
     public String getSizeOfDataset() {
         return DatasetUtil.getDownloadSize(workingVersion, false);
     }
@@ -3028,19 +3159,32 @@ public void setTooLargeToDownload(boolean tooLargeToDownload) {
         this.tooLargeToDownload = tooLargeToDownload;
     }
 
+    private Long sizeOfDatasetArchival = null; 
+    private Long sizeOfDatasetOriginal = null; 
+    
+    
     public Long getSizeOfDatasetNumeric() {
-        if (this.hasTabular){
+        if (this.versionHasTabular){
             return Math.min(getSizeOfDatasetOrigNumeric(), getSizeOfDatasetArchivalNumeric());
         }
         return getSizeOfDatasetOrigNumeric();
     }
 
     public Long getSizeOfDatasetOrigNumeric() {
-        return DatasetUtil.getDownloadSizeNumeric(workingVersion, true);
+        if (versionHasTabular) {
+            if (sizeOfDatasetOriginal == null) {
+                sizeOfDatasetOriginal = DatasetUtil.getDownloadSizeNumeric(workingVersion, true);
+            }
+            return sizeOfDatasetOriginal;
+        }
+        return getSizeOfDatasetArchivalNumeric();
     }
 
     public Long getSizeOfDatasetArchivalNumeric() {
-        return DatasetUtil.getDownloadSizeNumeric(workingVersion, false);
+        if (sizeOfDatasetArchival == null) {
+            sizeOfDatasetArchival = DatasetUtil.getDownloadSizeNumeric(workingVersion, false);
+        }
+        return sizeOfDatasetArchival; 
     }
 
     public String getSizeOfSelectedAsString(){
@@ -3092,9 +3236,9 @@ public void startDownloadSelectedOriginal() {
 
     private void startDownload(boolean downloadOriginal){
         boolean guestbookRequired = isDownloadPopupRequired();
-        boolean validate = validateFilesForDownload(guestbookRequired, downloadOriginal);
+        boolean validate = validateFilesForDownload(downloadOriginal);
         if (validate) {
-            updateGuestbookResponse(guestbookRequired, downloadOriginal);
+            updateGuestbookResponse(guestbookRequired, downloadOriginal, false);
             if(!guestbookRequired && !getValidateFilesOutcome().equals("Mixed")){
                 startMultipleFileDownload();
             }
@@ -3115,9 +3259,14 @@ public void setValidateFilesOutcome(String validateFilesOutcome) {
         this.validateFilesOutcome = validateFilesOutcome;
     }
 
-    public boolean validateFilesForDownload(boolean guestbookRequired, boolean downloadOriginal) {
-        setSelectedDownloadableFiles(new ArrayList<>());
-        setSelectedNonDownloadableFiles(new ArrayList<>());
+    public boolean validateFilesForDownload(boolean downloadOriginal){ 
+        if (this.selectedFiles.isEmpty()) {
+            PrimeFaces.current().executeScript("PF('selectFilesForDownload').show()");
+            return false;
+        } else {
+            this.filterSelectedFiles();
+        }
+        
         //assume Pass unless something bad happens
         setValidateFilesOutcome("Pass");
         Long bytes = (long) 0;
@@ -3127,22 +3276,17 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl
             return false;
         }
 
-        for (FileMetadata fmd : this.selectedFiles) {
-            if (this.fileDownloadHelper.canDownloadFile(fmd)) {
-                getSelectedDownloadableFiles().add(fmd);
-                DataFile dataFile = fmd.getDataFile();
-                if (downloadOriginal && dataFile.isTabularData()) {
-                    bytes += dataFile.getOriginalFileSize() == null ? 0 : dataFile.getOriginalFileSize();
-                } else {
-                    bytes += dataFile.getFilesize();
-                }
+        for (FileMetadata fmd : getSelectedDownloadableFiles()) {
+            DataFile dataFile = fmd.getDataFile();
+            if (downloadOriginal && dataFile.isTabularData()) {
+                bytes += dataFile.getOriginalFileSize() == null ? 0 : dataFile.getOriginalFileSize();
             } else {
-                getSelectedNonDownloadableFiles().add(fmd);
+                bytes += dataFile.getFilesize();
             }
         }
 
-        //if there are two or more files with a total size
-        //over the zip limit post a "too large" popup
+        //if there are two or more files, with a total size
+        //over the zip limit, post a "too large" popup
         if (bytes > settingsWrapper.getZipDownloadLimit() && selectedDownloadableFiles.size() > 1) {
             setValidateFilesOutcome("FailSize");
             return false;
@@ -3151,41 +3295,126 @@ public boolean validateFilesForDownload(boolean guestbookRequired, boolean downl
         // If some of the files were restricted and we had to drop them off the
         // list, and NONE of the files are left on the downloadable list
         // - we show them a "you're out of luck" popup:
-        if (getSelectedDownloadableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) {
+        if (getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) {
             setValidateFilesOutcome("FailRestricted");
             return false;
         }
 
-        if (!getSelectedDownloadableFiles().isEmpty() && !getSelectedNonDownloadableFiles().isEmpty()) {
+        // Some files are selected, and either some of them are non-downloadable, or both
+        // downloadable and Globus-transferable files are present
+        if ((!(getSelectedDownloadableFiles().isEmpty() && getSelectedGlobusTransferableFiles().isEmpty())
+                && (!getSelectedNonDownloadableFiles().isEmpty()) || (!getSelectedDownloadableFiles().isEmpty() && !getSelectedGlobusTransferableFiles().isEmpty()))) {
             setValidateFilesOutcome("Mixed");
             return true;
         }
-
-        if (guestbookRequired) {
+        //ToDo - should Mixed not trigger this?
+        if (isTermsPopupRequired() || isGuestbookPopupRequiredAtDownload()) {
             setValidateFilesOutcome("GuestbookRequired");
         }
-
         return true;
 
     }
 
-    private void updateGuestbookResponse (boolean guestbookRequired, boolean downloadOriginal) {
+    private void updateGuestbookResponse (boolean guestbookRequired, boolean downloadOriginal, boolean isGlobusTransfer) {
         // Note that the GuestbookResponse object may still have information from
         // the last download action performed by the user. For example, it may
         // still have the non-null Datafile in it, if the user has just downloaded
         // a single file; or it may still have the format set to "original" -
         // even if that's not what they are trying to do now.
         // So make sure to reset these values:
-        guestbookResponse.setDataFile(null);
-        guestbookResponse.setSelectedFileIds(getSelectedDownloadableFilesIdsString());
+        if(fileMetadataForAction == null) {
+            guestbookResponse.setDataFile(null);
+        } else {
+            guestbookResponse.setDataFile(fileMetadataForAction.getDataFile());
+        }
+        if(isGlobusTransfer) {
+            guestbookResponse.setSelectedFileIds(getFilesIdsString(getSelectedGlobusTransferableFiles()));
+        } else {
+            guestbookResponse.setSelectedFileIds(getSelectedDownloadableFilesIdsString());
+        }
         if (downloadOriginal) {
             guestbookResponse.setFileFormat("original");
         } else {
             guestbookResponse.setFileFormat("");
         }
-        guestbookResponse.setDownloadtype("Download");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
     }
 
+    /* Helper method that partitions the selected files into the downloadable /
+    non-downloadable, restricted / unrestricted, and Globus-transferable /
+    non-transferable lists, for reuse. */
+
+    private boolean filterSelectedFiles(){
+        setSelectedDownloadableFiles(new ArrayList<>());
+        setSelectedNonDownloadableFiles(new ArrayList<>());
+        setSelectedRestrictedFiles(new ArrayList<>());
+        setSelectedUnrestrictedFiles(new ArrayList<>());
+        setSelectedGlobusTransferableFiles(new ArrayList<>());
+        setSelectedNonGlobusTransferableFiles(new ArrayList<>());
+
+        boolean someFiles = false;
+        boolean globusDownloadEnabled = settingsWrapper.isGlobusDownload();
+        for (FileMetadata fmd : this.selectedFiles){
+            boolean downloadable=this.fileDownloadHelper.canDownloadFile(fmd);
+            
+            boolean globusTransferable = false;
+            if(globusDownloadEnabled) {
+                String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier());
+                globusTransferable = GlobusAccessibleStore.isGlobusAccessible(driverId);
+                downloadable = downloadable && !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId); 
+            }
+            if(downloadable){
+                getSelectedDownloadableFiles().add(fmd);
+                someFiles=true;
+            } else {
+                getSelectedNonDownloadableFiles().add(fmd);
+            }
+            if(globusTransferable) {
+                getSelectedGlobusTransferableFiles().add(fmd);
+                someFiles=true;
+            } else {
+                getSelectedNonGlobusTransferableFiles().add(fmd);
+            }
+            if(fmd.isRestricted()){
+                getSelectedRestrictedFiles().add(fmd); //might be downloadable to user or not
+                someFiles=true;
+            } else {
+                getSelectedUnrestrictedFiles().add(fmd);
+                someFiles=true;
+            }
+
+        }
+        return someFiles;
+    }
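+    // Editorial summary of how the lists populated above are consumed by
+    // validateFilesForDownload() earlier in this class: two or more downloadable files
+    // whose total size exceeds the zip limit yield "FailSize"; empty downloadable and
+    // Globus-transferable lists with non-downloadable selections yield "FailRestricted";
+    // non-downloadable files mixed with downloadable/transferable ones, or both
+    // downloadable and transferable files together, yield "Mixed"; otherwise the outcome
+    // is "Pass", with "GuestbookRequired" when a terms or guestbook popup applies.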
+
+    public void validateFilesForRequestAccess(){
+        this.filterSelectedFiles();
+
+        if(!dataset.isFileAccessRequest()){ //is this needed? The user wouldn't be able to click Request Access unless isFileAccessRequest() is true
+            return;
+        }
+
+        if(!this.selectedRestrictedFiles.isEmpty()){
+            ArrayList<FileMetadata> nonDownloadableRestrictedFiles = new ArrayList<>();
+
+            List<DataFile> userRequestedDataFiles = ((AuthenticatedUser) session.getUser()).getRequestedDataFiles();
+
+            for(FileMetadata fmd : this.selectedRestrictedFiles){
+                if(!this.fileDownloadHelper.canDownloadFile(fmd) && !userRequestedDataFiles.contains(fmd.getDataFile())){
+                    nonDownloadableRestrictedFiles.add(fmd);
+                }
+            }
+
+            if(!nonDownloadableRestrictedFiles.isEmpty()){
+                guestbookResponse.setDataFile(null);
+                guestbookResponse.setSelectedFileIds(this.getFilesIdsString(nonDownloadableRestrictedFiles));
+                this.requestAccessMultipleFiles();
+            } else {
+                //popup select data files
+            }
+        }
+    }
 
     private boolean selectAllFiles;
 
@@ -3211,26 +3440,23 @@ public void toggleAllSelected(){
 
     // helper Method
     public String getSelectedFilesIdsString() {
-        String downloadIdString = "";
-        for (FileMetadata fmd : this.selectedFiles){
-            if (!StringUtil.isEmpty(downloadIdString)) {
-                downloadIdString += ",";
-            }
-            downloadIdString += fmd.getDataFile().getId();
-        }
-        return downloadIdString;
+        return this.getFilesIdsString(this.selectedFiles);
     }
-
+    
     // helper Method
     public String getSelectedDownloadableFilesIdsString() {
-        String downloadIdString = "";
-        for (FileMetadata fmd : this.selectedDownloadableFiles){
-            if (!StringUtil.isEmpty(downloadIdString)) {
-                downloadIdString += ",";
+        return this.getFilesIdsString(this.selectedDownloadableFiles);
+    }
+    
+    public String getFilesIdsString(List<FileMetadata> fileMetadatas){ //for reuse
+        String idString = "";
+        for (FileMetadata fmd : fileMetadatas){
+            if (!StringUtil.isEmpty(idString)) {
+                idString += ",";
             }
-            downloadIdString += fmd.getDataFile().getId();
+            idString += fmd.getDataFile().getId();
         }
-        return downloadIdString;
+        return idString;
     }
 
 
@@ -3268,7 +3494,7 @@ public void saveLinkingDataverses(ActionEvent evt) {
             FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage);
             FacesContext.getCurrentInstance().addMessage(null, message);
         }
-
+        alreadyLinkedDataverses = null; //force update to list of linked dataverses
     }
 
     private String linkingDataverseErrorMessage = "";
@@ -3304,7 +3530,23 @@ private Boolean saveLink(Dataverse dataverse){
         }
         return retVal;
     }
-
+        
+    private String alreadyLinkedDataverses = null;
+    
+    public String getAlreadyLinkedDataverses(){
+        if (alreadyLinkedDataverses != null) {           
+            return alreadyLinkedDataverses;
+        }
+        List<Dataverse> dataverseList = dataverseService.findDataversesThatLinkToThisDatasetId(dataset.getId());
+        for (Dataverse dv: dataverseList){
+            if (alreadyLinkedDataverses == null){
+                alreadyLinkedDataverses = dv.getCurrentName();
+            } else {
+                alreadyLinkedDataverses = alreadyLinkedDataverses + ", " + dv.getCurrentName();
+            }
+        }
+        return alreadyLinkedDataverses;
+    }
 
     public List<Dataverse> completeLinkingDataverse(String query) {
         dataset = datasetService.find(dataset.getId());
@@ -3595,7 +3837,7 @@ public String save() {
                 //ToDo - could drop use of selectedTemplate and just use the persistent dataset.getTemplate() 
                 if ( selectedTemplate != null ) {
                     if ( isSessionUserAuthenticated() ) {
-                        cmd = new CreateNewDatasetCommand(dataset, dvRequestService.getDataverseRequest(), false, selectedTemplate);
+                        cmd = new CreateNewDatasetCommand(dataset, dvRequestService.getDataverseRequest(), selectedTemplate);
                     } else {
                         JH.addMessage(FacesMessage.SEVERITY_FATAL, BundleUtil.getStringFromBundle("dataset.create.authenticatedUsersOnly"));
                         return null;
@@ -3622,9 +3864,9 @@ public String save() {
                 ((UpdateDatasetVersionCommand) cmd).setValidateLenient(true);
             }
             dataset = commandEngine.submit(cmd);
-            for (DatasetField df : dataset.getLatestVersion().getDatasetFields()) {
+            for (DatasetField df : dataset.getLatestVersion().getFlatDatasetFields()) {
                 logger.fine("Found id: " + df.getDatasetFieldType().getId());
-                if (fieldService.getCVocConf(false).containsKey(df.getDatasetFieldType().getId())) {
+                if (fieldService.getCVocConf(true).containsKey(df.getDatasetFieldType().getId())) {
                     fieldService.registerExternalVocabValues(df);
                 }
             }
@@ -3791,7 +4033,7 @@ private String returnToLatestVersion(){
          setReleasedVersionTabList(resetReleasedVersionTabList());
          newFiles.clear();
          editMode = null;
-         return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&version="+ workingVersion.getFriendlyVersionNumber() +  "&faces-redirect=true";
+         return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&version="+ workingVersion.getFriendlyVersionNumber() +  "&faces-redirect=true";
     }
 
     private String returnToDatasetOnly(){
@@ -3801,7 +4043,7 @@ private String returnToDatasetOnly(){
     }
 
     private String returnToDraftVersion(){
-         return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&version=DRAFT" + "&faces-redirect=true";
+         return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&version=DRAFT" + "&faces-redirect=true";
     }
 
     public String cancel() {
@@ -4414,6 +4656,8 @@ public List< String[]> getExporters(){
             try {
                 exporter = ExportService.getInstance().getExporter(formatName);
             } catch (ExportException ex) {
+                logger.warning("Failed to get the exporter for format: " + formatName);
+                logger.warning(ex.getLocalizedMessage());
                 exporter = null;
             }
             if (exporter != null && exporter.isAvailableToUsers()) {
@@ -4422,7 +4666,7 @@ public List< String[]> getExporters(){
 
                 String[] temp = new String[2];
                 temp[0] = formatDisplayName;
-                temp[1] = myHostURL + "/api/datasets/export?exporter=" + formatName + "&persistentId=" + dataset.getGlobalIdString();
+                temp[1] = myHostURL + "/api/datasets/export?exporter=" + formatName + "&persistentId=" + dataset.getGlobalId().asString();
                 retList.add(temp);
             }
         }
@@ -5042,10 +5286,9 @@ public boolean isFileAccessRequestMultiButtonRequired(){
            // return false;
         }
         for (FileMetadata fmd : workingVersion.getFileMetadatas()){
+            AuthenticatedUser authenticatedUser = (AuthenticatedUser) session.getUser();
             //Change here so that if all restricted files have pending requests there's no Request Button
-            if ((!this.fileDownloadHelper.canDownloadFile(fmd) && (fmd.getDataFile().getFileAccessRequesters() == null
-                    || ( fmd.getDataFile().getFileAccessRequesters() != null
-                 &&   !fmd.getDataFile().getFileAccessRequesters().contains((AuthenticatedUser)session.getUser()))))){
+            if ((!this.fileDownloadHelper.canDownloadFile(fmd) && !fmd.getDataFile().containsActiveFileAccessRequestFromUser(authenticatedUser))) {
                 return true;
             }
         }
@@ -5056,6 +5299,9 @@ public boolean isFileAccessRequestMultiButtonEnabled(){
         if (!isSessionUserAuthenticated() || !dataset.isFileAccessRequest()){
             return false;
         }
+        //populate file lists
+        filterSelectedFiles();
+        
         if( this.selectedRestrictedFiles == null || this.selectedRestrictedFiles.isEmpty() ){
             return false;
         }
@@ -5067,35 +5313,6 @@ public boolean isFileAccessRequestMultiButtonEnabled(){
         return false;
     }
 
-    private Boolean downloadButtonAllEnabled = null;
-
-    public boolean isDownloadAllButtonEnabled() {
-
-        if (downloadButtonAllEnabled == null) {
-            for (FileMetadata fmd : workingVersion.getFileMetadatas()) {
-                if (!this.fileDownloadHelper.canDownloadFile(fmd)) {
-                    downloadButtonAllEnabled = false;
-                    break;
-                }
-            }
-            downloadButtonAllEnabled = true;
-        }
-        return downloadButtonAllEnabled;
-    }
-
-    public boolean isDownloadSelectedButtonEnabled(){
-
-        if( this.selectedFiles == null || this.selectedFiles.isEmpty() ){
-            return false;
-        }
-        for (FileMetadata fmd : this.selectedFiles){
-            if (this.fileDownloadHelper.canDownloadFile(fmd)){
-                return true;
-            }
-        }
-        return false;
-    }
-
     public boolean isFileAccessRequestMultiSignUpButtonRequired(){
         if (isSessionUserAuthenticated()){
             return false;
@@ -5138,7 +5355,24 @@ public boolean isDownloadPopupRequired() {
     public boolean isRequestAccessPopupRequired() {
         return FileUtil.isRequestAccessPopupRequired(workingVersion);
     }
+    
+    public boolean isGuestbookAndTermsPopupRequired() {  
+        return FileUtil.isGuestbookAndTermsPopupRequired(workingVersion);
+    }
 
+    public boolean isGuestbookPopupRequired(){
+        return FileUtil.isGuestbookPopupRequired(workingVersion);
+    }
+    
+    public boolean isTermsPopupRequired(){
+        return FileUtil.isTermsPopupRequired(workingVersion);
+    }
+    
+    public boolean isGuestbookPopupRequiredAtDownload(){
+        // Only show guestbookAtDownload if guestbook at request is disabled (legacy behavior)
+        return isGuestbookPopupRequired() && !workingVersion.getDataset().getEffectiveGuestbookEntryAtRequest();
+    }
+    
     public String requestAccessMultipleFiles() {
 
         if (selectedFiles.isEmpty()) {
@@ -5153,11 +5387,11 @@ public String requestAccessMultipleFiles() {
             for (FileMetadata fmd : selectedFiles){
                  fileDownloadHelper.addMultipleFilesForRequestAccess(fmd.getDataFile());
             }
-            if (isRequestAccessPopupRequired()) {
+            if (isGuestbookAndTermsPopupRequired()) {
                 //RequestContext requestContext = RequestContext.getCurrentInstance();
-                PrimeFaces.current().executeScript("PF('requestAccessPopup').show()");
+                PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show()"); //the popup will call writeGuestbookAndRequestAccess();
                 return "";
-            } else {
+            }else {
                 //No popup required
                 fileDownloadHelper.requestAccessIndirect();
                 return "";
@@ -5342,6 +5576,14 @@ public FileDownloadServiceBean getFileDownloadService() {
     public void setFileDownloadService(FileDownloadServiceBean fileDownloadService) {
         this.fileDownloadService = fileDownloadService;
     }
+    
+    public FileDownloadHelper getFileDownloadHelper() {
+        return fileDownloadHelper;
+    }
+
+    public void setFileDownloadHelper(FileDownloadHelper fileDownloadHelper) {
+        this.fileDownloadHelper = fileDownloadHelper;
+    }
 
 
     public GuestbookResponseServiceBean getGuestbookResponseService() {
@@ -5456,12 +5698,25 @@ public boolean isShowPreviewButton(Long fileId) {
         List<ExternalTool> previewTools = getPreviewToolsForDataFile(fileId);
         return previewTools.size() > 0;
     }
+    
+    public boolean isShowQueryButton(Long fileId) { 
+        DataFile dataFile = datafileService.find(fileId);
+
+        if(dataFile.isRestricted() || !dataFile.isReleased()  || FileUtil.isActivelyEmbargoed(dataFile)){
+            return false;
+        }
+        
+        List<ExternalTool> fileQueryTools = getQueryToolsForDataFile(fileId);
+        return fileQueryTools.size() > 0;
+    }
 
     public List<ExternalTool> getPreviewToolsForDataFile(Long fileId) {
         return getCachedToolsForDataFile(fileId, ExternalTool.Type.PREVIEW);
     }
 
-
+    public List<ExternalTool> getQueryToolsForDataFile(Long fileId) {
+        return getCachedToolsForDataFile(fileId, ExternalTool.Type.QUERY);
+    }
     public List<ExternalTool> getConfigureToolsForDataFile(Long fileId) {
         return getCachedToolsForDataFile(fileId, ExternalTool.Type.CONFIGURE);
     }
@@ -5486,6 +5741,10 @@ public List<ExternalTool> getCachedToolsForDataFile(Long fileId, ExternalTool.Ty
                 cachedToolsByFileId = previewToolsByFileId;
                 externalTools = previewTools;
                 break;
+            case QUERY:
+                cachedToolsByFileId = fileQueryToolsByFileId;
+                externalTools = fileQueryTools;
+                break;    
             default:
                 break;
         }
@@ -5503,6 +5762,18 @@ public List<ExternalTool> getDatasetExploreTools() {
         return datasetExploreTools;
     }
 
+    public List<ExternalTool> getDatasetConfigureTools() {
+        return datasetConfigureTools;
+    }
+
+    public ExternalTool getDatasetConfigureTool() {
+        return datasetConfigureTool;
+    }
+
+    public void setDatasetConfigureTool(ExternalTool datasetConfigureTool) {
+        this.datasetConfigureTool = datasetConfigureTool;
+    }
+
     Boolean thisLatestReleasedVersion = null;
 
     public boolean isThisLatestReleasedVersion() {
@@ -5556,6 +5827,10 @@ public void clearSelection() {
     }
 
     public void fileListingPaginatorListener(PageEvent event) {
+        // Changing to a new page of files - set this so we can ignore changes to the
+        // presort checkboxes. (This listener is invoked before the presort setters are
+        // called.)
+        isPageFlip=true;
         setFilePaginatorPage(event.getPage());
     }
 
@@ -5672,66 +5947,52 @@ public boolean isSomeVersionArchived() {
         return someVersionArchived;
     }
 
-    private static Date getFileDateToCompare(FileMetadata fileMetadata) {
-        DataFile datafile = fileMetadata.getDataFile();
-
-        if (datafile.isReleased()) {
-            return datafile.getPublicationDate();
+    public boolean isTagPresort() {
+       return this.tagPresort;
         }
 
-        return datafile.getCreateDate();
-    }
-
-    private static final Comparator<FileMetadata> compareByLabelZtoA = new Comparator<FileMetadata>() {
-        @Override
-        public int compare(FileMetadata o1, FileMetadata o2) {
-            return o2.getLabel().toUpperCase().compareTo(o1.getLabel().toUpperCase());
-        }
-    };
-
-    private static final Comparator<FileMetadata> compareByNewest = new Comparator<FileMetadata>() {
-        @Override
-        public int compare(FileMetadata o1, FileMetadata o2) {
-            return getFileDateToCompare(o2).compareTo(getFileDateToCompare(o1));
+        public void setTagPresort(boolean tagPresort) {
+            // Record the new value
+            newTagPresort = tagPresort && (null != getSortOrder());
+            // If this is not a page flip, it should be a real change to the presort
+            // boolean that we should use.
+            if (!isPageFlip) {
+                this.tagPresort = tagPresort && (null != getSortOrder());
+            }
         }
-    };
 
-    private static final Comparator<FileMetadata> compareByOldest = new Comparator<FileMetadata>() {
-        @Override
-        public int compare(FileMetadata o1, FileMetadata o2) {
-            return getFileDateToCompare(o1).compareTo(getFileDateToCompare(o2));
+    public boolean isFolderPresort() {
+        return this.folderPresort;
         }
-    };
 
-    private static final Comparator<FileMetadata> compareBySize = new Comparator<FileMetadata>() {
-        @Override
-        public int compare(FileMetadata o1, FileMetadata o2) {
-            return (new Long(o1.getDataFile().getFilesize())).compareTo(new Long(o2.getDataFile().getFilesize()));
+        public void setFolderPresort(boolean folderPresort) {
+            //Record the new value
+            newFolderPresort = folderPresort && orderByFolder();
+            // If this is not a page flip, it should be a real change to the presort
+            // boolean that we should use.
+            if (!isPageFlip) {
+                this.folderPresort = folderPresort && orderByFolder();
+            }
         }
-    };
 
-    private static final Comparator<FileMetadata> compareByType = new Comparator<FileMetadata>() {
-        @Override
-        public int compare(FileMetadata o1, FileMetadata o2) {
-            String type1 = StringUtil.isEmpty(o1.getDataFile().getFriendlyType()) ? "" : o1.getDataFile().getContentType();
-            String type2 = StringUtil.isEmpty(o2.getDataFile().getFriendlyType()) ? "" : o2.getDataFile().getContentType();
-            return type1.compareTo(type2);
-        }
-    };
 
     public void explore(ExternalTool externalTool) {
         ApiToken apiToken = null;
         User user = session.getUser();
+        apiToken = authService.getValidApiTokenForUser(user);
+        ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataset, apiToken, session.getLocaleCode());
+        PrimeFaces.current().executeScript(externalToolHandler.getExploreScript());
+    }
+
+    public void configure(ExternalTool externalTool) {
+        ApiToken apiToken = null;
+        User user = session.getUser();
+        //Not enabled for PrivateUrlUsers (who wouldn't have write permissions anyway)
         if (user instanceof AuthenticatedUser) {
-            apiToken = authService.findApiTokenByUser((AuthenticatedUser) user);
-        } else if (user instanceof PrivateUrlUser) {
-            PrivateUrlUser privateUrlUser = (PrivateUrlUser) user;
-            PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId());
-            apiToken = new ApiToken();
-            apiToken.setTokenString(privUrl.getToken());
+            apiToken = authService.getValidApiTokenForAuthenticatedUser((AuthenticatedUser) user);
         }
         ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataset, apiToken, session.getLocaleCode());
-        PrimeFaces.current().executeScript(externalToolHandler.getExploreScript());
+        PrimeFaces.current().executeScript(externalToolHandler.getConfigureScript());
     }
 
     private FileMetadata fileMetadataForAction;
@@ -5777,7 +6038,7 @@ public String getEffectiveMetadataLanguage() {
     }
     public String getEffectiveMetadataLanguage(boolean ofParent) {
         String mdLang = ofParent ? dataset.getOwner().getEffectiveMetadataLanguage() : dataset.getEffectiveMetadataLanguage();
-        if (mdLang.equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)) {
+        if (mdLang.equals(DvObjectContainer.UNDEFINED_CODE)) {
             mdLang = settingsWrapper.getDefaultMetadataLanguage();
         }
         return mdLang;
@@ -5785,7 +6046,7 @@ public String getEffectiveMetadataLanguage(boolean ofParent) {
 
     public String getLocaleDisplayName(String code) {
         String displayName = settingsWrapper.getBaseMetadataLanguageMap(false).get(code);
-        if(displayName==null && !code.equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)) {
+        if(displayName==null && !code.equals(DvObjectContainer.UNDEFINED_CODE)) {
             //Default (for cases such as :when a Dataset has a metadatalanguage code but :MetadataLanguages is no longer defined).
             displayName = new Locale(code).getDisplayName();
         }
@@ -5797,7 +6058,7 @@ public Set<Entry<String, String>> getMetadataLanguages() {
     }
     
     public List<String> getVocabScripts() {
-        return fieldService.getVocabScripts(settingsWrapper.getCVocConf());
+        return fieldService.getVocabScripts(settingsWrapper.getCVocConf(false));
     }
 
     public String getFieldLanguage(String languages) {
@@ -6046,25 +6307,55 @@ public boolean downloadingRestrictedFiles() {
         }
         return false;
     }
-    
-    
+
     //Determines whether this Dataset uses a public store and therefore doesn't support embargoed or restricted files
     public boolean isHasPublicStore() {
         return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(dataset.getEffectiveStorageDriverId()));
     }
     
-    public void startGlobusTransfer() {
-        ApiToken apiToken = null;
-        User user = session.getUser();
-        if (user instanceof AuthenticatedUser) {
-            apiToken = authService.findApiTokenByUser((AuthenticatedUser) user);
-        } else if (user instanceof PrivateUrlUser) {
-            PrivateUrlUser privateUrlUser = (PrivateUrlUser) user;
-            PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId());
-            apiToken = new ApiToken();
-            apiToken.setTokenString(privUrl.getToken());
+    public boolean isGlobusTransferRequested() {
+        return globusTransferRequested;
+    }
+    
+    /**
+     * Analogous to the startDownload method, this method is called when the user
+     * tries to start a Globus transfer out (~download). The
+     * validateFilesForDownload call checks whether there are files that can be
+     * Globus-transferred and, if so and there are no files that can't be
+     * transferred, this method launches the Globus transfer app. If there is a
+     * mix of files or if the guestbook popup is required, the method passes back to
+     * the UI so those popup(s) can be shown. Once they are, this method is called
+     * again with the popupShown param true and the transfer app is launched.
+     * 
+     * @param transferAll - when called from the dataset Access menu, this should be
+     *                    true so that all files are included in the processing.
+     *                    When it is called from the file table, the current
+     *                    selection is used and the param should be false.
+     * @param popupShown  - This method is called twice if the mixed-files or
+     *                    guestbook popups are needed. On the first call, popupShown
+     *                    is false so that the transfer is not started and those
+     *                    popups can be shown. On the second call, popupShown is
+     *                    true and processing will occur as long as there are some
+     *                    valid files to transfer.
+     */
+    public void startGlobusTransfer(boolean transferAll, boolean popupShown) {
+        if (transferAll) {
+            this.setSelectedFiles(workingVersion.getFileMetadatas());
+        }
+        boolean guestbookRequired = isDownloadPopupRequired();
+        
+        boolean validated = validateFilesForDownload(true);
+        if (validated) {
+            globusTransferRequested = true;
+            boolean mixed = "Mixed".equals(getValidateFilesOutcome());
+            // update the guestbook response for this request (a transfer, not a direct download)
+            updateGuestbookResponse(guestbookRequired, true, true);
+            if ((!guestbookRequired && !mixed) || popupShown) {
+                boolean doNotSaveGuestbookResponse = workingVersion.isDraft();
+                globusService.writeGuestbookAndStartTransfer(guestbookResponse, doNotSaveGuestbookResponse);
+                globusTransferRequested = false;
+            }
         }
-        PrimeFaces.current().executeScript(globusService.getGlobusDownloadScript(dataset, apiToken));
     }
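+    /*
+     * Illustrative call sequence (a sketch; the exact UI wiring is assumed):
+     *
+     *   startGlobusTransfer(true, false); // from the Access menu: may only raise the guestbook/mixed-files popup
+     *   // ...user confirms the popup...
+     *   startGlobusTransfer(true, true);  // second call with popupShown=true: the transfer app is launched
+     */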
 
     public String getWebloaderUrlForDataset(Dataset d) {
@@ -6080,5 +6371,29 @@ public String getWebloaderUrlForDataset(Dataset d) {
             return null;
         }
     }
+    
+    /**
+     * Signposting support: builds (and caches) the HTTP Link header for this
+     * dataset page. Only available when the working version has been released.
+     *
+     * @return the Link header value, or null if there is no released version
+     */
+
+    String signpostingLinkHeader = null;
+
+    public String getSignpostingLinkHeader() {
+        if ((workingVersion==null) || (!workingVersion.isReleased())) {
+            if(workingVersion==null) {
+                logger.warning("workingVersion was null in getSignpostingLinkHeader");
+            }
+            return null;
+        }
+        if (signpostingLinkHeader == null) {
+            SignpostingResources sr = new SignpostingResources(systemConfig, workingVersion,
+                    JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
+                    JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""));
+            signpostingLinkHeader = sr.getLinks();
+        }
+        return signpostingLinkHeader;
+    }
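+    /*
+     * Illustrative shape of the returned value (assumed example; the actual content
+     * depends on the dataset and the configured Signposting limits):
+     *   <https://doi.org/10.5072/FK2/EXAMPLE> ; rel="cite-as" ,
+     *   <https://orcid.org/0000-0000-0000-0000> ; rel="author" ,
+     *   ... ; rel="describedby" ; type="application/ld+json"
+     */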
 
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java b/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java
index f432e4f5bbf..53ea62f566a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetRelMaterial.java
@@ -6,14 +6,14 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Version;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Version;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index 91ec050fe5c..c6df2a2e1ab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse;
 
+import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -30,21 +31,21 @@
 import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import org.apache.commons.lang3.RandomStringUtils;
-import org.ocpsoft.common.util.Strings;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.LockModeType;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
+import org.apache.commons.lang3.StringUtils;
 
 /**
  *
@@ -105,6 +106,38 @@ public Dataset find(Object pk) {
         return em.find(Dataset.class, pk);
     }
 
+    /**
+     * Retrieve a dataset together with its deep underlying structure in a single query execution.
+     * This is the better choice when the files of a dataset are going to be accessed.
+     * In contrast, the find() method does not pre-fetch the file objects and results in point queries when those objects are accessed.
+     * Because the files have a deep structure, findDeep() avoids many of those queries, especially for large datasets
+     * containing many files and when iterating over all the files.
+     * When the file objects are not going to be accessed, the default find() method is preferable because of its lazy loading.
+     * @param pk the primary key (id) of the dataset
+     * @return a dataset with pre-fetched file objects
+     */
+    public Dataset findDeep(Object pk) {
+        return (Dataset) em.createNamedQuery("Dataset.findById")
+            .setParameter("id", pk)
+            // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files
+            .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest")
+            .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset")
+            .setHint("eclipselink.left-join-fetch", "o.files.dataTables")
+            .setHint("eclipselink.left-join-fetch", "o.files.auxiliaryFiles")
+            .setHint("eclipselink.left-join-fetch", "o.files.ingestReports")
+            .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags")
+            .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas")
+            .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.fileCategories")
+            //.setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses")
+            .setHint("eclipselink.left-join-fetch", "o.files.embargo")
+            .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests")
+            .setHint("eclipselink.left-join-fetch", "o.files.owner")
+            .setHint("eclipselink.left-join-fetch", "o.files.releaseUser")
+            .setHint("eclipselink.left-join-fetch", "o.files.creator")
+            .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers")
+            .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments")
+            .getSingleResult();
+    }
+    
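+    /*
+     * Typical use (sketch): prefer findDeep when the files will be iterated, e.g.
+     *
+     *   Dataset ds = datasetService.findDeep(datasetId);
+     *   for (DataFile f : ds.getFiles()) { ... } // no per-file point queries
+     *
+     * and keep using find(pk) when the file objects are not needed.
+     */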
     public List<Dataset> findByOwnerId(Long ownerId) {
         return findByOwnerId(ownerId, false);
     }
@@ -199,8 +232,10 @@ public List<Long> findAllUnindexed() {
     }
 
     //Used in datasets listcurationstatus API
-    public List<Dataset> findAllUnpublished() {
-        return em.createQuery("SELECT object(o) FROM Dataset o, DvObject d WHERE d.id=o.id and d.publicationDate IS null ORDER BY o.id ASC", Dataset.class).getResultList();
+    public List<Dataset> findAllWithDraftVersion() {
+        TypedQuery<Dataset> query = em.createQuery("SELECT object(d) FROM Dataset d, DatasetVersion v WHERE d.id=v.dataset.id and v.versionState=:state ORDER BY d.id ASC", Dataset.class);
+        query.setParameter("state", VersionState.DRAFT);
+        return query.getResultList();
     }
 
     /**
@@ -280,12 +315,12 @@ public Dataset merge( Dataset ds ) {
     }
 
     public Dataset findByGlobalId(String globalId) {
-        Dataset retVal = (Dataset) dvObjectService.findByGlobalId(globalId, "Dataset");
+        Dataset retVal = (Dataset) dvObjectService.findByGlobalId(globalId, DvObject.DType.Dataset);
         if (retVal != null){
             return retVal;
         } else {
             //try to find with alternative PID
-            return (Dataset) dvObjectService.findByGlobalId(globalId, "Dataset", true);
+            return (Dataset) dvObjectService.findByAltGlobalId(globalId, DvObject.DType.Dataset);
         }
     }
 
@@ -295,7 +330,7 @@ public Dataset findByGlobalId(String globalId) {
      * in the dataset components, a ConstraintViolationException will be thrown,
      * which can be further parsed to detect the specific offending values.
      * @param id the id of the dataset
-     * @throws javax.validation.ConstraintViolationException
+     * @throws ConstraintViolationException
      */
 
     @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
@@ -316,85 +351,11 @@ public void instantiateDatasetInNewTransaction(Long id, boolean includeVariables
         }
     }
 
-    public String generateDatasetIdentifier(Dataset dataset, GlobalIdServiceBean idServiceBean) {
-        String identifierType = settingsService.getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "randomString");
-        String shoulder = settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder, "");
-
-        switch (identifierType) {
-            case "randomString":
-                return generateIdentifierAsRandomString(dataset, idServiceBean, shoulder);
-            case "storedProcGenerated":
-                return generateIdentifierFromStoredProcedure(dataset, idServiceBean, shoulder);
-            default:
-                /* Should we throw an exception instead?? -- L.A. 4.6.2 */
-                return generateIdentifierAsRandomString(dataset, idServiceBean, shoulder);
-        }
-    }
-
-    private String generateIdentifierAsRandomString(Dataset dataset, GlobalIdServiceBean idServiceBean, String shoulder) {
-        String identifier = null;
-        do {
-            identifier = shoulder + RandomStringUtils.randomAlphanumeric(6).toUpperCase();
-        } while (!isIdentifierLocallyUnique(identifier, dataset));
-
-        return identifier;
-    }
-
-    private String generateIdentifierFromStoredProcedure(Dataset dataset, GlobalIdServiceBean idServiceBean, String shoulder) {
-
-        String identifier;
-        do {
-            StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure");
-            query.execute();
-            String identifierFromStoredProcedure = (String) query.getOutputParameterValue(1);
-            // some diagnostics here maybe - is it possible to determine that it's failing
-            // because the stored procedure hasn't been created in the database?
-            if (identifierFromStoredProcedure == null) {
-                return null;
-            }
-            identifier = shoulder + identifierFromStoredProcedure;
-        } while (!isIdentifierLocallyUnique(identifier, dataset));
-
-        return identifier;
-    }
-
-    /**
-     * Check that a identifier entered by the user is unique (not currently used
-     * for any other study in this Dataverse Network) also check for duplicate
-     * in EZID if needed
-     * @param userIdentifier
-     * @param dataset
-     * @param persistentIdSvc
-     * @return {@code true} if the identifier is unique, {@code false} otherwise.
-     */
-    public boolean isIdentifierUnique(String userIdentifier, Dataset dataset, GlobalIdServiceBean persistentIdSvc) {
-        if ( ! isIdentifierLocallyUnique(userIdentifier, dataset) ) return false; // duplication found in local database
-
-        // not in local DB, look in the persistent identifier service
-        try {
-            return ! persistentIdSvc.alreadyExists(dataset);
-        } catch (Exception e){
-            //we can live with failure - means identifier not found remotely
-        }
-
-        return true;
-    }
-
-    public boolean isIdentifierLocallyUnique(Dataset dataset) {
-        return isIdentifierLocallyUnique(dataset.getIdentifier(), dataset);
-    }
 
-    public boolean isIdentifierLocallyUnique(String identifier, Dataset dataset) {
-        return em.createNamedQuery("Dataset.findByIdentifierAuthorityProtocol")
-            .setParameter("identifier", identifier)
-            .setParameter("authority", dataset.getAuthority())
-            .setParameter("protocol", dataset.getProtocol())
-            .getResultList().isEmpty();
-    }
 
     public Long getMaximumExistingDatafileIdentifier(Dataset dataset) {
         //Cannot rely on the largest table id having the greatest identifier counter
-        long zeroFiles = new Long(0);
+        long zeroFiles = 0L;
         Long retVal = zeroFiles;
         Long testVal;
         List<Object> idResults;
@@ -411,7 +372,7 @@ public Long getMaximumExistingDatafileIdentifier(Dataset dataset) {
                 for (Object raw: idResults){
                     String identifier = (String) raw;
                     identifier =  identifier.substring(identifier.lastIndexOf("/") + 1);
-                    testVal = new Long(identifier) ;
+                    testVal = Long.valueOf(identifier) ;
                     if (testVal > retVal){
                         retVal = testVal;
                     }
@@ -437,7 +398,7 @@ public DatasetVersionUser getDatasetVersionUser(DatasetVersion version, User use
         query.setParameter("userId", au.getId());
         try {
             return query.getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             return null;
         }
     }
@@ -552,7 +513,7 @@ public List<DatasetLock> listLocks(DatasetLock.Reason lockType, AuthenticatedUse
         }
         try {
             return query.getResultList();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             return null;
         }
     }
@@ -633,7 +594,7 @@ public Map<Long, String> getArchiveDescriptionsForHarvestedDatasets(Set<Long> da
             return null;
         }
 
-        String datasetIdStr = Strings.join(datasetIds, ", ");
+        String datasetIdStr = StringUtils.join(datasetIds, ", ");
 
         String qstr = "SELECT d.id, h.archiveDescription FROM harvestingClient h, dataset d WHERE d.harvestingClient_id = h.id AND d.id IN (" + datasetIdStr + ")";
         List<Object[]> searchResults;
@@ -781,10 +742,10 @@ public void exportAllDatasets(boolean forceReExport) {
                         countAll++;
                         try {
                             recordService.exportAllFormatsInNewTransaction(dataset);
-                            exportLogger.info("Success exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalIdString());
+                            exportLogger.info("Success exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId().asString());
                             countSuccess++;
                         } catch (Exception ex) {
-                            exportLogger.info("Error exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalIdString() + "; " + ex.getMessage());
+                            exportLogger.log(Level.INFO, "Error exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId().asString() + "; " + ex.getMessage(), ex);
                             countError++;
                         }
                     }
@@ -801,7 +762,6 @@ public void exportAllDatasets(boolean forceReExport) {
         }
 
     }
-    
 
     @Asynchronous
     public void reExportDatasetAsync(Dataset dataset) {
@@ -810,35 +770,31 @@ public void reExportDatasetAsync(Dataset dataset) {
 
     public void exportDataset(Dataset dataset, boolean forceReExport) {
         if (dataset != null) {
-            // Note that the logic for handling a dataset is similar to what is implemented in exportAllDatasets, 
+            // Note that the logic for handling a dataset is similar to what is implemented in exportAllDatasets,
             // but when only one dataset is exported we do not log in a separate export logging file
             if (dataset.isReleased() && dataset.getReleasedVersion() != null && !dataset.isDeaccessioned()) {
 
-                // can't trust dataset.getPublicationDate(), no. 
+                // can't trust dataset.getPublicationDate(), no.
                 Date publicationDate = dataset.getReleasedVersion().getReleaseTime(); // we know this dataset has a non-null released version! Maybe not - SEK 8/19 (We do now! :)
                 if (forceReExport || (publicationDate != null
                         && (dataset.getLastExportTime() == null
                         || dataset.getLastExportTime().before(publicationDate)))) {
                     try {
                         recordService.exportAllFormatsInNewTransaction(dataset);
-                        logger.info("Success exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalIdString());
+                        logger.info("Success exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId().asString());
                     } catch (Exception ex) {
-                        logger.info("Error exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalIdString() + "; " + ex.getMessage());
+                        logger.log(Level.INFO, "Error exporting dataset: " + dataset.getDisplayName() + " " + dataset.getGlobalId().asString() + "; " + ex.getMessage(), ex);
                     }
                 }
             }
         }
-        
-    }
 
-    public String getReminderString(Dataset dataset, boolean canPublishDataset) {
-        return getReminderString( dataset, canPublishDataset, false);
     }
 
     //get a string to add to save success message
     //depends on page (dataset/file) and user privleges
-    public String getReminderString(Dataset dataset, boolean canPublishDataset, boolean filePage) {
-       
+    public String getReminderString(Dataset dataset, boolean canPublishDataset, boolean filePage, boolean isValid) {
+
         String reminderString;
 
         if (canPublishDataset) {
@@ -863,6 +819,10 @@ public String getReminderString(Dataset dataset, boolean canPublishDataset, bool
             }
         }
 
+        if (!isValid) {
+            reminderString = reminderString + "<br/><b style=\"color:red;\"> " + BundleUtil.getStringFromBundle("dataset.message.incomplete.warning") + "</b>";
+        }
+
         if (reminderString != null) {
             return reminderString;
         } else {
@@ -1019,7 +979,7 @@ public void obtainPersistentIdentifiersForDatafiles(Dataset dataset) {
                     maxIdentifier++;
                     datafile.setIdentifier(datasetIdentifier + "/" + maxIdentifier.toString());
                 } else {
-                    datafile.setIdentifier(fileService.generateDataFileIdentifier(datafile, idServiceBean));
+                    datafile.setIdentifier(idServiceBean.generateDataFileIdentifier(datafile));
                 }
 
                 if (datafile.getProtocol() == null) {
@@ -1166,5 +1126,5 @@ public void deleteHarvestedDataset(Dataset dataset, DataverseRequest request, Lo
             hdLogger.warning("Failed to destroy the dataset");
         }
     }
-
+    
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java b/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java
index f253e1810a1..91a4ff3cf5a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetTopicClass.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.persistence.Column;
-import javax.persistence.Version;
+import jakarta.persistence.Column;
+import jakarta.persistence.Version;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
index c21861a1bf4..5fd963f3931 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java
@@ -3,10 +3,12 @@
 import edu.harvard.iq.dataverse.util.MarkupChecker;
 import edu.harvard.iq.dataverse.util.PersonOrOrgUtil;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.DataFileComparator;
 import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.dataset.DatasetUtil;
 import edu.harvard.iq.dataverse.license.License;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -15,9 +17,7 @@
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
 import java.io.Serializable;
-import java.net.URL;
 import java.sql.Timestamp;
-import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.*;
@@ -25,38 +25,37 @@
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
-import javax.persistence.UniqueConstraint;
-import javax.persistence.Version;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
-import javax.validation.constraints.Size;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
+import jakarta.persistence.UniqueConstraint;
+import jakarta.persistence.Version;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.constraints.Size;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -67,7 +66,15 @@
 @NamedQueries({
     @NamedQuery(name = "DatasetVersion.findUnarchivedReleasedVersion",
                query = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.dataset.harvestedFrom IS NULL and o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL"
-    )})
+    ), 
+    @NamedQuery(name = "DatasetVersion.findById", 
+                query = "SELECT o FROM DatasetVersion o LEFT JOIN FETCH o.fileMetadatas WHERE o.id=:id"), 
+    @NamedQuery(name = "DatasetVersion.findByDataset",
+                query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC"), 
+    @NamedQuery(name = "DatasetVersion.findReleasedByDataset",
+                query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId AND o.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")/*,
+    @NamedQuery(name = "DatasetVersion.findVersionElements",
+                query = "SELECT o.id, o.versionState, o.versionNumber, o.minorVersionNumber FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")*/})
     
     
 @Entity
@@ -77,6 +84,7 @@
 public class DatasetVersion implements Serializable {
 
     private static final Logger logger = Logger.getLogger(DatasetVersion.class.getCanonicalName());
+    private static final Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
 
     /**
      * Convenience comparator to compare dataset versions by their version number.
@@ -243,14 +251,34 @@ public List<FileMetadata> getFileMetadatas() {
     }
     
     public List<FileMetadata> getFileMetadatasSorted() {
-        Collections.sort(fileMetadatas, FileMetadata.compareByLabel);
+ 
+        /*
+         * fileMetadatas can sometimes be an
+         * org.eclipse.persistence.indirection.IndirectList. When that happens, the
+         * comparator in the Collections.sort below is not called, possibly due to
+         * https://bugs.eclipse.org/bugs/show_bug.cgi?id=446236, which is Java 1.8+
+         * specific. Converting to an ArrayList solves the problem, but the longer-term
+         * solution may lie in avoiding the IndirectList or moving to a newer version of
+         * the jar it is in.
+         */
+        if(!(fileMetadatas instanceof ArrayList)) {
+            List<FileMetadata> newFMDs = new ArrayList<FileMetadata>();
+            for(FileMetadata fmd: fileMetadatas) {
+                newFMDs.add(fmd);
+            }
+            setFileMetadatas(newFMDs);
+        }
+        
+        DataFileComparator dfc = new DataFileComparator();
+        Collections.sort(fileMetadatas, dfc.compareBy(true, null!=FileMetadata.getCategorySortOrder(), "name", true));
         return fileMetadatas;
     }
     
     public List<FileMetadata> getFileMetadatasSortedByLabelAndFolder() {
         ArrayList<FileMetadata> fileMetadatasCopy = new ArrayList<>();
         fileMetadatasCopy.addAll(fileMetadatas);
-        Collections.sort(fileMetadatasCopy, FileMetadata.compareByLabelAndFolder);
+        DataFileComparator dfc = new DataFileComparator();
+        Collections.sort(fileMetadatasCopy, dfc.compareBy(true, null!=FileMetadata.getCategorySortOrder(), "name", true));
         return fileMetadatasCopy;
     }
     
@@ -389,7 +417,7 @@ public void setDeaccessionLink(String deaccessionLink) {
     }
 
     public GlobalId getDeaccessionLinkAsGlobalId() {
-        return new GlobalId(deaccessionLink);
+        return PidUtil.parseAsGlobalID(deaccessionLink);
     }
 
     public Date getCreateTime() {
@@ -1367,17 +1395,14 @@ public List<String> getUniqueGrantAgencyValues() {
     }
 
     /**
-     * @return String containing the version's series title
+     * @return List of Strings containing the version's series title(s)
      */
-    public String getSeriesTitle() {
+    public List<String> getSeriesTitles() {
 
         List<String> seriesNames = getCompoundChildFieldValues(DatasetFieldConstant.series,
                 DatasetFieldConstant.seriesName);
-        if (seriesNames.size() > 1) {
-            logger.warning("More than one series title found for datasetVersion: " + this.id);
-        }
         if (!seriesNames.isEmpty()) {
-            return seriesNames.get(0);
+            return seriesNames;
         } else {
             return null;
         }
@@ -1689,8 +1714,6 @@ public String getSemanticVersion() {
 
     public List<ConstraintViolation<DatasetField>> validateRequired() {
         List<ConstraintViolation<DatasetField>> returnListreturnList = new ArrayList<>();
-        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
-        Validator validator = factory.getValidator();
         for (DatasetField dsf : this.getFlatDatasetFields()) {
             dsf.setValidationMessage(null); // clear out any existing validation message
             Set<ConstraintViolation<DatasetField>> constraintViolations = validator.validate(dsf);
@@ -1704,11 +1727,13 @@ public List<ConstraintViolation<DatasetField>> validateRequired() {
         return returnListreturnList;
     }
     
+    public boolean isValid() {
+        return validate().isEmpty();
+    }
+
     public Set<ConstraintViolation> validate() {
         Set<ConstraintViolation> returnSet = new HashSet<>();
 
-        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
-        Validator validator = factory.getValidator();
 
         for (DatasetField dsf : this.getFlatDatasetFields()) {
             dsf.setValidationMessage(null); // clear out any existing validation message
@@ -1795,7 +1820,7 @@ public String getPublicationDateAsString() {
     // So something will need to be modified to accommodate this. -- L.A.  
     /**
      * We call the export format "Schema.org JSON-LD" and extensive Javadoc can
-     * be found in {@link SchemaDotOrgExporter}.
+     * be found in {@link edu.harvard.iq.dataverse.export.SchemaDotOrgExporter}.
      */
     public String getJsonLd() {
         // We show published datasets only for "datePublished" field below.
@@ -2039,10 +2064,8 @@ public String getJsonLd() {
             for (FileMetadata fileMetadata : fileMetadatasSorted) {
                 JsonObjectBuilder fileObject = NullSafeJsonBuilder.jsonObjectBuilder();
                 String filePidUrlAsString = null;
-                URL filePidUrl = fileMetadata.getDataFile().getGlobalId().toURL();
-                if (filePidUrl != null) {
-                    filePidUrlAsString = filePidUrl.toString();
-                }
+                GlobalId gid = fileMetadata.getDataFile().getGlobalId();
+                filePidUrlAsString = gid != null ? gid.asURL() : null;
                 fileObject.add("@type", "DataDownload");
                 fileObject.add("name", fileMetadata.getLabel());
                 fileObject.add("encodingFormat", fileMetadata.getDataFile().getContentType());
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java
index 98f0d707bdc..b670fb18afc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionConverter.java
@@ -5,12 +5,12 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
index e844a3f1ca8..eca0c84ae84 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionDifference.java
@@ -2,28 +2,29 @@
 
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VarGroup;
-import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.Collection;
 import java.util.List;
 import java.util.Set;
+import java.util.logging.Logger;
 
 import org.apache.commons.lang3.StringUtils;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.FileUtil;
-
 import java.util.Arrays;
 import java.util.Date;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedList;
 
 /**
  *
  * @author skraffmiller
  */
 public final class DatasetVersionDifference {
+    private static final Logger logger = Logger.getLogger(DatasetVersionDifference.class.getCanonicalName());
 
     private DatasetVersion newVersion;
     private DatasetVersion originalVersion;
@@ -1713,4 +1714,109 @@ public void setDatasetFilesDiffList(List<datasetFileDifferenceItem> datasetFiles
         this.datasetFilesDiffList = datasetFilesDiffList;
     }
 
+    /*
+     * Static methods to compute which blocks have changes between the two
+     * DatasetVersions. Currently used to assess whether 'system metadatablocks'
+     * (protected by a separate key) have changed. (Simplified from the methods
+     * above that track all the individual changes)
+     * 
+     */
+    public static Set<MetadataBlock> getBlocksWithChanges(DatasetVersion newVersion, DatasetVersion originalVersion) {
+        Set<MetadataBlock> changedBlockSet = new HashSet<MetadataBlock>();
+
+        // Compare Data
+        List<DatasetField> newDatasetFields = new LinkedList<DatasetField>(newVersion.getDatasetFields());
+        if (originalVersion == null) {
+            // Every field is new, just list blocks used
+            Iterator<DatasetField> dsfnIter = newDatasetFields.listIterator();
+            while (dsfnIter.hasNext()) {
+                DatasetField dsfn = dsfnIter.next();
+                if (!changedBlockSet.contains(dsfn.getDatasetFieldType().getMetadataBlock())) {
+                    changedBlockSet.add(dsfn.getDatasetFieldType().getMetadataBlock());
+                }
+            }
+
+        } else {
+            List<DatasetField> originalDatasetFields = new LinkedList<DatasetField>(originalVersion.getDatasetFields());
+            Iterator<DatasetField> dsfoIter = originalDatasetFields.listIterator();
+            while (dsfoIter.hasNext()) {
+                DatasetField dsfo = dsfoIter.next();
+                boolean deleted = true;
+                Iterator<DatasetField> dsfnIter = newDatasetFields.listIterator();
+
+                while (dsfnIter.hasNext()) {
+                    DatasetField dsfn = dsfnIter.next();
+                    if (dsfo.getDatasetFieldType().equals(dsfn.getDatasetFieldType())) {
+                        deleted = false;
+                        if (!changedBlockSet.contains(dsfo.getDatasetFieldType().getMetadataBlock())) {
+                            logger.fine("Checking " + dsfo.getDatasetFieldType().getName());
+                            if (dsfo.getDatasetFieldType().isPrimitive()) {
+                                if (fieldsAreDifferent(dsfo, dsfn, false)) {
+                                    logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName());
+                                    changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock());
+                                }
+                            } else {
+                                if (fieldsAreDifferent(dsfo, dsfn, true)) {
+                                    logger.fine("Adding block for " + dsfo.getDatasetFieldType().getName());
+                                    changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock());
+                                }
+                            }
+                        }
+                        dsfnIter.remove();
+                        break; // if found go to next dataset field
+                    }
+                }
+
+                if (deleted) {
+                    logger.fine("Adding block for deleted " + dsfo.getDatasetFieldType().getName());
+                    changedBlockSet.add(dsfo.getDatasetFieldType().getMetadataBlock());
+                }
+                dsfoIter.remove();
+            }
+            // Only fields left are non-matching ones but they may be empty
+            for (DatasetField dsfn : newDatasetFields) {
+                if (!dsfn.isEmpty()) {
+                    logger.fine("Adding block for added " + dsfn.getDatasetFieldType().getName());
+                    changedBlockSet.add(dsfn.getDatasetFieldType().getMetadataBlock());
+                }
+            }
+        }
+        return changedBlockSet;
+    }
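+    /*
+     * Sketch of intended use: callers can check whether any protected ("system")
+     * metadata block was touched between two versions, e.g.
+     *
+     *   Set<MetadataBlock> changed = DatasetVersionDifference.getBlocksWithChanges(newVersion, originalVersion);
+     *   boolean systemBlockChanged = changed.stream().anyMatch(mb -> isSystemBlock(mb)); // isSystemBlock(): hypothetical helper
+     */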
+
+    private static boolean fieldsAreDifferent(DatasetField originalField, DatasetField newField, boolean compound) {
+        String originalValue = "";
+        String newValue = "";
+
+        if (compound) {
+            // Track the matching compound value index across iterations (declared outside the
+            // loop so the increment below actually advances it).
+            int loopIndex = 0;
+            for (DatasetFieldCompoundValue datasetFieldCompoundValueOriginal : originalField
+                    .getDatasetFieldCompoundValues()) {
+                if (newField.getDatasetFieldCompoundValues().size() >= loopIndex + 1) {
+                    for (DatasetField dsfo : datasetFieldCompoundValueOriginal.getChildDatasetFields()) {
+                        if (!dsfo.getDisplayValue().isEmpty()) {
+                            originalValue += dsfo.getDisplayValue() + ", ";
+                        }
+                    }
+                    for (DatasetField dsfn : newField.getDatasetFieldCompoundValues().get(loopIndex)
+                            .getChildDatasetFields()) {
+                        if (!dsfn.getDisplayValue().isEmpty()) {
+                            newValue += dsfn.getDisplayValue() + ", ";
+                        }
+                    }
+                    if (!originalValue.trim().equals(newValue.trim())) {
+                        return true;
+                    }
+                }
+                loopIndex++;
+            }
+        } else {
+            originalValue = originalField.getDisplayValue();
+            newValue = newField.getDisplayValue();
+            if (!originalValue.equalsIgnoreCase(newValue)) {
+                return true;
+            }
+        }
+        return false;
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
new file mode 100644
index 00000000000..99c3c65e3b8
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java
@@ -0,0 +1,326 @@
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Tuple;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.criteria.*;
+
+import java.io.Serializable;
+import java.sql.Timestamp;
+import java.util.*;
+
+import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes;
+
+@Stateless
+@Named
+public class DatasetVersionFilesServiceBean implements Serializable {
+
+    @PersistenceContext(unitName = "VDCNet-ejbPU")
+    private EntityManager em;
+
+    /**
+     * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas}
+     */
+    public enum FileOrderCriteria {
+        NameAZ, NameZA, Newest, Oldest, Size, Type
+    }
+
+    /**
+     * Mode used by {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileSearchCriteria, FileDownloadSizeMode)} to compute the total download size
+     * <p>
+     * All: Includes both archival and original sizes for tabular files
+     * Archival: Includes only the archival size for tabular files
+     * Original: Includes only the original size for tabular files
+     * <p>
+     * All the modes include archival sizes for non-tabular files
+     */
+    public enum FileDownloadSizeMode {
+        All, Original, Archival
+    }
+
+    /**
+     * Given a DatasetVersion, returns its total file metadata count
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for counting only files matching this criteria
+     * @return long value of total file metadata count
+     */
+    public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        criteriaQuery
+                .select(criteriaBuilder.count(fileMetadataRoot))
+                .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot));
+        return em.createQuery(criteriaQuery).getSingleResult();
+    }
+
+    /**
+     * Given a DatasetVersion, returns its file metadata count per content type
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for counting only files matching this criteria
+     * @return Map<String, Long> of file metadata counts per content type
+     */
+    public Map<String, Long> getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Tuple> criteriaQuery = criteriaBuilder.createTupleQuery();
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        Path<String> contentType = fileMetadataRoot.get("dataFile").get("contentType");
+        criteriaQuery
+                .multiselect(contentType, criteriaBuilder.count(contentType))
+                .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))
+                .groupBy(contentType);
+        return getStringLongMapResultFromQuery(criteriaQuery);
+    }
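+    // Example result (illustrative values only): {"text/tab-separated-values": 3, "application/pdf": 1}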
+
+    /**
+     * Given a DatasetVersion, returns its file metadata count per category name
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for counting only files matching this criteria
+     * @return Map<String, Long> of file metadata counts per category name
+     */
+    public Map<String, Long> getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Tuple> criteriaQuery = criteriaBuilder.createTupleQuery();
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        Root<DataFileCategory> dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class);
+        Path<String> categoryName = dataFileCategoryRoot.get("name");
+        criteriaQuery
+                .multiselect(categoryName, criteriaBuilder.count(fileMetadataRoot))
+                .where(criteriaBuilder.and(
+                        createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot),
+                        dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories"))))
+                .groupBy(categoryName);
+        return getStringLongMapResultFromQuery(criteriaQuery);
+    }
+
+    /**
+     * Given a DatasetVersion, returns its file metadata count per DataFileTag.TagType
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for counting only files matching this criteria
+     * @return Map<DataFileTag.TagType, Long> of file metadata counts per DataFileTag.TagType
+     */
+    public Map<DataFileTag.TagType, Long> getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Tuple> criteriaQuery = criteriaBuilder.createTupleQuery();
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        Root<DataFileTag> dataFileTagRoot = criteriaQuery.from(DataFileTag.class);
+        Path<DataFileTag.TagType> dataFileTagType = dataFileTagRoot.get("type");
+        criteriaQuery
+                .multiselect(dataFileTagType, criteriaBuilder.count(fileMetadataRoot))
+                .where(criteriaBuilder.and(
+                        createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot),
+                        dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags"))))
+                .groupBy(dataFileTagType);
+        List<Tuple> tagNameOccurrences = em.createQuery(criteriaQuery).getResultList();
+        Map<DataFileTag.TagType, Long> result = new HashMap<>();
+        for (Tuple occurrence : tagNameOccurrences) {
+            result.put(occurrence.get(0, DataFileTag.TagType.class), occurrence.get(1, Long.class));
+        }
+        return result;
+    }
+
+    /**
+     * Given a DatasetVersion, returns its file metadata count per FileAccessStatus
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for counting only files matching this criteria
+     * @return Map<FileAccessStatus, Long> of file metadata counts per FileAccessStatus
+     */
+    public Map<FileAccessStatus, Long> getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        Map<FileAccessStatus, Long> allCounts = new HashMap<>();
+        addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public, searchCriteria);
+        addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted, searchCriteria);
+        addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic, searchCriteria);
+        addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted, searchCriteria);
+        return allCounts;
+    }
+
+    /**
+     * Returns a FileMetadata list of files in the specified DatasetVersion
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param limit          for pagination, can be null
+     * @param offset         for pagination, can be null
+     * @param searchCriteria for retrieving only files matching this criteria
+     * @param orderCriteria  a FileOrderCriteria to order the results
+     * @return a FileMetadata list from the specified DatasetVersion
+     */
+    public List<FileMetadata> getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<FileMetadata> criteriaQuery = criteriaBuilder.createQuery(FileMetadata.class);
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        criteriaQuery
+                .select(fileMetadataRoot)
+                .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))
+                .orderBy(createGetFileMetadatasOrder(criteriaBuilder, orderCriteria, fileMetadataRoot));
+        TypedQuery<FileMetadata> typedQuery = em.createQuery(criteriaQuery);
+        if (limit != null) {
+            typedQuery.setMaxResults(limit);
+        }
+        if (offset != null) {
+            typedQuery.setFirstResult(offset);
+        }
+        return typedQuery.getResultList();
+    }
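+    // Illustrative use (sketch): page through a version's files 10 at a time, newest first,
+    // given an already-built FileSearchCriteria named 'criteria':
+    //   List<FileMetadata> page = getFileMetadatas(datasetVersion, 10, 0, criteria, FileOrderCriteria.Newest);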
+
+    /**
+     * Returns the total download size of all files for a particular DatasetVersion
+     *
+     * @param datasetVersion the DatasetVersion to access
+     * @param searchCriteria for retrieving only files matching this criteria
+     * @param mode           a FileDownloadSizeMode to base the search on
+     * @return long value of total file download size
+     */
+    public long getFilesDownloadSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria, FileDownloadSizeMode mode) {
+        return switch (mode) {
+            case All ->
+                    Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, false, searchCriteria));
+            case Original ->
+                    Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, true, searchCriteria));
+            case Archival -> getArchivalFilesSize(datasetVersion, false, searchCriteria);
+        };
+    }
+
+    private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map<FileAccessStatus, Long> totalCounts, FileAccessStatus dataFileAccessStatus, FileSearchCriteria searchCriteria) {
+        long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus, searchCriteria);
+        if (fileMetadataCount > 0) {
+            totalCounts.put(dataFileAccessStatus, fileMetadataCount);
+        }
+    }
+
+    private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        criteriaQuery
+                .select(criteriaBuilder.count(fileMetadataRoot))
+                .where(criteriaBuilder.and(
+                        createSearchCriteriaAccessStatusPredicate(accessStatus, criteriaBuilder, fileMetadataRoot),
+                        createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)));
+        return em.createQuery(criteriaQuery).getSingleResult();
+    }
+
+    private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus accessStatus, CriteriaBuilder criteriaBuilder, Root<FileMetadata> fileMetadataRoot) {
+        Path<Object> dataFile = fileMetadataRoot.get("dataFile");
+        Path<Object> embargo = dataFile.get("embargo");
+        Predicate activelyEmbargoedPredicate = criteriaBuilder.greaterThanOrEqualTo(embargo.<Date>get("dateAvailable"), criteriaBuilder.currentDate());
+        Predicate inactivelyEmbargoedPredicate = criteriaBuilder.isNull(embargo);
+        Path<Boolean> isRestricted = dataFile.get("restricted");
+        Predicate isRestrictedPredicate = criteriaBuilder.isTrue(isRestricted);
+        Predicate isUnrestrictedPredicate = criteriaBuilder.isFalse(isRestricted);
+        return switch (accessStatus) {
+            case EmbargoedThenRestricted -> criteriaBuilder.and(activelyEmbargoedPredicate, isRestrictedPredicate);
+            case EmbargoedThenPublic -> criteriaBuilder.and(activelyEmbargoedPredicate, isUnrestrictedPredicate);
+            case Restricted -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isRestrictedPredicate);
+            case Public -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isUnrestrictedPredicate);
+        };
+    }
+
+    private Predicate createSearchCriteriaPredicate(DatasetVersion datasetVersion,
+                                                    FileSearchCriteria searchCriteria,
+                                                    CriteriaBuilder criteriaBuilder,
+                                                    CriteriaQuery<?> criteriaQuery,
+                                                    Root<FileMetadata> fileMetadataRoot) {
+        List<Predicate> predicates = new ArrayList<>();
+        Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").<String>get("id"), datasetVersion.getId());
+        predicates.add(basePredicate);
+        String contentType = searchCriteria.getContentType();
+        if (contentType != null) {
+            predicates.add(criteriaBuilder.equal(fileMetadataRoot.get("dataFile").<String>get("contentType"), contentType));
+        }
+        FileAccessStatus accessStatus = searchCriteria.getAccessStatus();
+        if (accessStatus != null) {
+            predicates.add(createSearchCriteriaAccessStatusPredicate(accessStatus, criteriaBuilder, fileMetadataRoot));
+        }
+        String categoryName = searchCriteria.getCategoryName();
+        if (categoryName != null) {
+            Root<DataFileCategory> dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class);
+            predicates.add(criteriaBuilder.equal(dataFileCategoryRoot.get("name"), categoryName));
+            predicates.add(dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories")));
+        }
+        String tabularTagName = searchCriteria.getTabularTagName();
+        if (tabularTagName != null) {
+            Root<DataFileTag> dataFileTagRoot = criteriaQuery.from(DataFileTag.class);
+            predicates.add(criteriaBuilder.equal(dataFileTagRoot.get("type"), TagLabelToTypes.get(tabularTagName)));
+            predicates.add(dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags")));
+        }
+        String searchText = searchCriteria.getSearchText();
+        if (searchText != null && !searchText.isEmpty()) {
+            searchText = searchText.trim().toLowerCase();
+            predicates.add(criteriaBuilder.like(fileMetadataRoot.get("label"), "%" + searchText + "%"));
+        }
+        return criteriaBuilder.and(predicates.toArray(new Predicate[]{}));
+    }
+
+    private List<Order> createGetFileMetadatasOrder(CriteriaBuilder criteriaBuilder,
+                                                    FileOrderCriteria orderCriteria,
+                                                    Root<FileMetadata> fileMetadataRoot) {
+        Path<Object> label = fileMetadataRoot.get("label");
+        Path<Object> dataFile = fileMetadataRoot.get("dataFile");
+        Path<Timestamp> publicationDate = dataFile.get("publicationDate");
+        Path<Timestamp> createDate = dataFile.get("createDate");
+        Expression<Object> orderByLifetimeExpression = criteriaBuilder.selectCase().when(publicationDate.isNotNull(), publicationDate).otherwise(createDate);
+        List<Order> orderList = new ArrayList<>();
+        switch (orderCriteria) {
+            case NameZA -> orderList.add(criteriaBuilder.desc(label));
+            case Newest -> orderList.add(criteriaBuilder.desc(orderByLifetimeExpression));
+            case Oldest -> orderList.add(criteriaBuilder.asc(orderByLifetimeExpression));
+            case Size -> orderList.add(criteriaBuilder.asc(dataFile.get("filesize")));
+            case Type -> {
+                orderList.add(criteriaBuilder.asc(dataFile.get("contentType")));
+                orderList.add(criteriaBuilder.asc(label));
+            }
+            default -> orderList.add(criteriaBuilder.asc(label));
+        }
+        return orderList;
+    }
+
+    private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        Root<DataTable> dataTableRoot = criteriaQuery.from(DataTable.class);
+        criteriaQuery
+                .select(criteriaBuilder.sum(dataTableRoot.get("originalFileSize")))
+                .where(criteriaBuilder.and(
+                        criteriaBuilder.equal(dataTableRoot.get("dataFile"), fileMetadataRoot.get("dataFile")),
+                        createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)));
+        Long result = em.createQuery(criteriaQuery).getSingleResult();
+        return (result == null) ? 0 : result;
+    }
+
+    private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular, FileSearchCriteria searchCriteria) {
+        CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+        CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
+        Root<FileMetadata> fileMetadataRoot = criteriaQuery.from(FileMetadata.class);
+        Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot);
+        Predicate wherePredicate;
+        if (ignoreTabular) {
+            wherePredicate = criteriaBuilder.and(searchCriteriaPredicate, criteriaBuilder.isEmpty(fileMetadataRoot.get("dataFile").get("dataTables")));
+        } else {
+            wherePredicate = searchCriteriaPredicate;
+        }
+        criteriaQuery
+                .select(criteriaBuilder.sum(fileMetadataRoot.get("dataFile").get("filesize")))
+                .where(wherePredicate);
+        Long result = em.createQuery(criteriaQuery).getSingleResult();
+        return (result == null) ? 0 : result;
+    }
+
+    private Map<String, Long> getStringLongMapResultFromQuery(CriteriaQuery<Tuple> criteriaQuery) {
+        List<Tuple> categoryNameOccurrences = em.createQuery(criteriaQuery).getResultList();
+        Map<String, Long> result = new HashMap<>();
+        for (Tuple occurrence : categoryNameOccurrences) {
+            result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class));
+        }
+        return result;
+    }
+}
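The size computation in getFilesDownloadSize above combines two sums: DataTable.originalFileSize for tabular files and DataFile.filesize for the archival (stored) copies. Mode Archival sums the stored sizes of all matching files, Original replaces the stored size of tabular files with their original size, and All adds the original tabular sizes on top of every stored size. A minimal caller sketch, assuming the injected service bean plus datasetVersion and searchCriteria instances exist in the surrounding context (variable names here are illustrative; the method signatures come from the class above):

    // Hypothetical usage of the three FileDownloadSizeMode variants.
    long archivalOnly  = datasetVersionFilesService.getFilesDownloadSize(datasetVersion, searchCriteria, FileDownloadSizeMode.Archival);
    long withOriginals = datasetVersionFilesService.getFilesDownloadSize(datasetVersion, searchCriteria, FileDownloadSizeMode.Original);
    long everything    = datasetVersionFilesService.getFilesDownloadSize(datasetVersion, searchCriteria, FileDownloadSizeMode.All);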
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
index c086fed3b10..a5ea487a68f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionNoteValidator.java
@@ -6,8 +6,8 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
index 23fc1961b7d..1ee517c9831 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java
@@ -2,6 +2,7 @@
 
 import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.ingest.IngestUtil;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -12,7 +13,7 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.MarkupChecker;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.io.IOException;
+
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -21,22 +22,21 @@
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
-import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.solr.client.solrj.SolrServerException;
-    
+
 /**
  *
  * @author skraffmiller
@@ -48,7 +48,7 @@ public class DatasetVersionServiceBean implements java.io.Serializable {
     private static final Logger logger = Logger.getLogger(DatasetVersionServiceBean.class.getCanonicalName());
 
     private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss");
-    
+
     @EJB
     DatasetServiceBean datasetService;
     
@@ -149,11 +149,61 @@ public DatasetVersion getDatasetVersion(){
             return this.datasetVersionForResponse;
         }                
     } // end RetrieveDatasetVersionResponse
-    
+
     public DatasetVersion find(Object pk) {
         return em.find(DatasetVersion.class, pk);
     }
-
+    
+    public DatasetVersion findDeep(Object pk) {
+        return (DatasetVersion) em.createNamedQuery("DatasetVersion.findById")
+            .setParameter("id", pk)
+            // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files 
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.ingestRequest")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.thumbnailForDataset")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.dataTables")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.fileCategories")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.embargo")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.datasetVersion")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.releaseUser")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.creator")
+            .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.dataFileTags")
+            .getSingleResult();
+    }
+    
+    /**
+     * Performs the same database lookup as the one behind Dataset.getVersions().
+     * Additionally, it accepts offset and length arguments for selecting a partial 
+     * list of versions for pagination, plus the ability to pre-select 
+     * only the publicly-viewable versions. 
+     * Individual software components should use the ListVersionsCommand 
+     * instead of calling this service method directly.
+     * @param datasetId
+     * @param offset for pagination through long lists of versions
+     * @param length for pagination through long lists of versions
+     * @param includeUnpublished when true, all versions are retrieved, including drafts and deaccessioned ones. 
+     * @return (partial) list of versions
+     */
+    public List<DatasetVersion> findVersions(Long datasetId, Integer offset, Integer length, boolean includeUnpublished) {
+        TypedQuery<DatasetVersion> query;  
+        if (includeUnpublished) {
+            query = em.createNamedQuery("DatasetVersion.findByDataset", DatasetVersion.class);
+        } else {
+            query = em.createNamedQuery("DatasetVersion.findReleasedByDataset", DatasetVersion.class)
+                    .setParameter("datasetId", datasetId);
+        }
+        
+        query.setParameter("datasetId", datasetId);
+        
+        if (offset != null) {
+            query.setFirstResult(offset);
+        }
+        if (length != null) {
+            query.setMaxResults(length);
+        }
+        
+        return query.getResultList();
+    }
+    
     public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendlyVersionNumber) {
         Long majorVersionNumber = null;
         Long minorVersionNumber = null;
@@ -180,7 +230,7 @@ public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendl
                 query.setParameter("majorVersionNumber", majorVersionNumber);
                 query.setParameter("minorVersionNumber", minorVersionNumber);
                 foundDatasetVersion = (DatasetVersion) query.getSingleResult();
-            } catch (javax.persistence.NoResultException e) {
+            } catch (NoResultException e) {
                 logger.warning("no ds version found: " + datasetId + " " + friendlyVersionNumber);
                 // DO nothing, just return null.
             }
@@ -208,7 +258,7 @@ public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendl
                     }
                 }
                 return retVal;
-            } catch (javax.persistence.NoResultException e) {
+            } catch (NoResultException e) {
                 logger.warning("no ds version found: " + datasetId + " " + friendlyVersionNumber);
                 // DO nothing, just return null.
             }
@@ -435,7 +485,7 @@ private DatasetVersion getDatasetVersionByQuery(String queryString){
             msg("Found: " + ds);
             return ds;
             
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             msg("DatasetVersion not found: " + queryString);
             logger.log(Level.FINE, "DatasetVersion not found: {0}", queryString);
             return null;
@@ -445,10 +495,24 @@ private DatasetVersion getDatasetVersionByQuery(String queryString){
          }
     } // end getDatasetVersionByQuery
     
-    
-    
-    
-    public DatasetVersion retrieveDatasetVersionByIdentiferClause(String identifierClause, String version){
+    /**
+     * @deprecated because of a typo; use {@link #retrieveDatasetVersionByIdentifierClause(String, String) retrieveDatasetVersionByIdentifierClause} instead
+     * @see #retrieveDatasetVersionByIdentifierClause(String, String)
+     * @param identifierClause
+     * @param version
+     * @return a DatasetVersion if found, or {@code null} otherwise
+     */
+    @Deprecated
+    public DatasetVersion retrieveDatasetVersionByIdentiferClause(String identifierClause, String version) {
+        return retrieveDatasetVersionByIdentifierClause(identifierClause, version);
+    }
+
+    /**
+     * @param identifierClause
+     * @param version
+     * @return a DatasetVersion if found, or {@code null} otherwise
+     */
+    public DatasetVersion retrieveDatasetVersionByIdentifierClause(String identifierClause, String version) {
         
         if (identifierClause == null){
             return null;
@@ -559,7 +623,7 @@ public RetrieveDatasetVersionResponse retrieveDatasetVersionByPersistentId(Strin
         */
         GlobalId parsedId;
         try{
-            parsedId = new GlobalId(persistentId);   // [ protocol, authority, identifier]
+            parsedId = PidUtil.parseAsGlobalID(persistentId);   // [ protocol, authority, identifier]
         } catch (IllegalArgumentException e){
             logger.log(Level.WARNING, "Failed to parse persistentID: {0}", persistentId);
             return null;
@@ -570,7 +634,7 @@ public RetrieveDatasetVersionResponse retrieveDatasetVersionByPersistentId(Strin
         identifierClause += " AND ds.identifier = '" + parsedId.getIdentifier() + "'"; 
         
 
-        DatasetVersion ds = retrieveDatasetVersionByIdentiferClause(identifierClause, version);
+        DatasetVersion ds = retrieveDatasetVersionByIdentifierClause(identifierClause, version);
         
         if (ds != null){
             msg("retrieved dataset: " + ds.getId() + " semantic: " + ds.getSemanticVersion());
@@ -668,7 +732,7 @@ public DatasetVersion getDatasetVersionById(Long datasetId, String version){
         
         String identifierClause = this.getIdClause(datasetId);
 
-        DatasetVersion ds = retrieveDatasetVersionByIdentiferClause(identifierClause, version);
+        DatasetVersion ds = retrieveDatasetVersionByIdentifierClause(identifierClause, version);
         
         return ds;
 
@@ -761,7 +825,7 @@ public Long getThumbnailByVersionId(Long versionId) {
                         + "AND df.id = o.id "
                         + "AND fm.datasetversion_id = dv.id "
                         + "AND fm.datafile_id = df.id "
-                        // + "AND o.previewImageAvailable = false "
+                        + "AND o.previewimagefail = false "
                         + "AND df.restricted = false "
                         + "AND df.embargo_id is null "
                         + "AND df.contenttype LIKE 'image/%' "
@@ -795,7 +859,7 @@ public Long getThumbnailByVersionId(Long versionId) {
                         + "AND df.id = o.id "
                         + "AND fm.datasetversion_id = dv.id "
                         + "AND fm.datafile_id = df.id "
-                        // + "AND o.previewImageAvailable = false "
+                        + "AND o.previewimagefail = false "
                         + "AND df.restricted = false "
                         + "AND df.embargo_id is null "
                         + "AND df.contenttype = 'application/pdf' "
@@ -892,7 +956,7 @@ public void populateDatasetSearchCard(SolrSearchResult solrSearchResult) {
         if (searchResult.length == 5) {
             Dataset datasetEntity = new Dataset();
             String globalIdentifier = solrSearchResult.getIdentifier();
-            GlobalId globalId = new GlobalId(globalIdentifier);
+            GlobalId globalId = PidUtil.parseAsGlobalID(globalIdentifier);
 
             datasetEntity.setProtocol(globalId.getProtocol());
             datasetEntity.setAuthority(globalId.getAuthority());
@@ -1117,13 +1181,7 @@ public JsonObjectBuilder fixMissingUnf(String datasetVersionId, boolean forceRec
 
         // reindexing the dataset, to make sure the new UNF is in SOLR:
         boolean doNormalSolrDocCleanUp = true;
-        try {
-            Future<String> indexingResult = indexService.indexDataset(datasetVersion.getDataset(), doNormalSolrDocCleanUp);
-        } catch (IOException | SolrServerException e) {    
-            String failureLogText = "Post UNF update indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + datasetVersion.getDataset().getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(null, failureLogText,  datasetVersion.getDataset());
-        }
+        indexService.asyncIndexDataset(datasetVersion.getDataset(), doNormalSolrDocCleanUp);
         return info;
     }
     
@@ -1207,7 +1265,7 @@ public List<DatasetVersion> getUnarchivedDatasetVersions(){
         try {
             List<DatasetVersion> dsl = em.createNamedQuery("DatasetVersion.findUnarchivedReleasedVersion", DatasetVersion.class).getResultList();
             return dsl;
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             logger.log(Level.FINE, "No unarchived DatasetVersions found: {0}");
             return null;
         } catch (EJBException e) {
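The findDeep lookup above relies on EclipseLink's "eclipselink.left-join-fetch" query hints to pull the file-level associations in a single query, avoiding per-file point queries while iterating over fileMetadatas, and findVersions paginates over the DatasetVersion.findByDataset / DatasetVersion.findReleasedByDataset named queries via setFirstResult/setMaxResults. A hypothetical paginating caller, assuming an injected datasetVersionService bean and a logger (both names are assumptions, not part of the patch):

    // Page through the published versions of a dataset, 10 at a time (sketch only).
    int pageSize = 10;
    for (int offset = 0; ; offset += pageSize) {
        List<DatasetVersion> page = datasetVersionService.findVersions(datasetId, offset, pageSize, false);
        page.forEach(v -> logger.fine("found version " + v.getSemanticVersion()));
        if (page.size() < pageSize) {
            break;
        }
    }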
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
index d09457c86bf..55b98c178bb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUI.java
@@ -6,25 +6,21 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.util.MarkupChecker;
-import edu.harvard.iq.dataverse.util.StringUtil;
+
 import java.io.Serializable;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.TreeMap;
-import static java.util.stream.Collectors.toList;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
@@ -35,6 +31,9 @@ public class DatasetVersionUI implements Serializable {
 
     @EJB
     DataverseServiceBean dataverseService;
+    @Inject
+    SettingsWrapper settingsWrapper;
+    
     @PersistenceContext(unitName = "VDCNet-ejbPU")
     private EntityManager em;   
     
@@ -400,6 +399,9 @@ public void setMetadataValueBlocks(DatasetVersion datasetVersion) {
         //TODO: A lot of clean up on the logic of this method
         metadataBlocksForView.clear();
         metadataBlocksForEdit.clear();
+        
+        List<MetadataBlock> systemMDBlocks = settingsWrapper.getSystemMetadataBlocks();
+        
         Long dvIdForInputLevel = datasetVersion.getDataset().getOwner().getId();
         
         if (!dataverseService.find(dvIdForInputLevel).isMetadataBlockRoot()){
@@ -442,7 +444,7 @@ public void setMetadataValueBlocks(DatasetVersion datasetVersion) {
             if (!datasetFieldsForView.isEmpty()) {
                 metadataBlocksForView.put(mdb, datasetFieldsForView);
             }
-            if (!datasetFieldsForEdit.isEmpty()) {
+            if (!datasetFieldsForEdit.isEmpty() && !systemMDBlocks.contains(mdb)) {
                 metadataBlocksForEdit.put(mdb, datasetFieldsForEdit);
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java
index eda62a080f8..e56fad71253 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionUser.java
@@ -4,20 +4,20 @@
 import edu.harvard.iq.dataverse.authorization.users.User;
 import java.io.Serializable;
 import java.sql.Timestamp;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
 
-import javax.persistence.Id;
-import javax.persistence.Index;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
 
-import javax.persistence.JoinColumn;
+import jakarta.persistence.JoinColumn;
 
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * Records the last time a {@link User} handled a {@link DatasetVersion}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java
index 9cc611e146a..1dd42903118 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetWidgetsPage.java
@@ -14,10 +14,10 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.primefaces.event.FileUploadEvent;
 import org.primefaces.model.file.UploadedFile;
 
@@ -164,7 +164,7 @@ public String save() {
         try {
             DatasetThumbnail datasetThumbnailFromCommand = commandEngine.submit(updateDatasetThumbnailCommand);
             JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("dataset.thumbnailsAndWidget.success"));
-            return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&faces-redirect=true";
+            return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&faces-redirect=true";
         } catch (CommandException ex) {
             String error = ex.getLocalizedMessage();
             /**
@@ -179,7 +179,7 @@ public String save() {
 
     public String cancel() {
         logger.fine("cancel clicked");
-        return "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&faces-redirect=true";
+        return "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&faces-redirect=true";
     }
 
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index bc8716b6129..c1de9d63410 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -2,8 +2,8 @@
 
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
-import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch;
+import edu.harvard.iq.dataverse.storageuse.StorageUse;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 
@@ -13,29 +13,28 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.FetchType;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotNull;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
-
-import org.apache.commons.lang3.StringUtils;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotNull;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
+
 import org.hibernate.validator.constraints.NotBlank;
 import org.hibernate.validator.constraints.NotEmpty;
 
@@ -105,7 +104,11 @@ public enum DataverseType {
      * dataverses.
      */
     protected boolean permissionRoot;
-
+    
+    public Dataverse() {
+        StorageUse storageUse = new StorageUse(this); 
+        this.setStorageUse(storageUse);
+    }
     
     public DataverseType getDataverseType() {
         return dataverseType;
@@ -590,8 +593,34 @@ public void setCitationDatasetFieldTypes(List<DatasetFieldType> citationDatasetF
         this.citationDatasetFieldTypes = citationDatasetFieldTypes;
     }
     
-    
+    /**
+     * @Note: this setting is Nullable, with {@code null} indicating that the 
+     * desired behavior is not explicitly configured for this specific collection. 
+     * See the comment below. 
+     */
+    @Column(nullable = true)
+    private Boolean filePIDsEnabled;
 
+    /**
+     * Specifies whether the PIDs for Datafiles should be registered when publishing 
+     * datasets in this Collection, if the behavior is explicitly configured.
+     * @return {@code Boolean.TRUE} if explicitly enabled, {@code Boolean.FALSE} if explicitly disabled. 
+     * {@code null} indicates that the behavior is not explicitly defined, in which 
+     * case the behavior should follow the explicit configuration of the first 
+     * direct ancestor collection, or the instance-wide configuration, if none 
+     * present. 
+     * @Note: If present, this configuration therefore by default applies to all 
+     * the sub-collections, unless explicitly overwritten there.
+     * @author landreev
+     */
+    public Boolean getFilePIDsEnabled() {
+        return filePIDsEnabled;
+    }
+    
+    public void setFilePIDsEnabled(boolean filePIDsEnabled) {
+        this.filePIDsEnabled = filePIDsEnabled;
+    }
+    
     public List<DataverseFacet> getDataverseFacets() {
         return getDataverseFacets(false);
     }
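The new filePIDsEnabled column in Dataverse.java is deliberately tri-state: TRUE and FALSE are explicit per-collection choices, while null means the collection inherits the behavior. Resolving the effective value therefore amounts to walking up the owner chain until an explicit setting is found, falling back to the instance-wide configuration otherwise. A sketch of that resolution, where the helper name and the instanceWideDefault parameter are assumptions rather than part of the patch:

    // Nearest explicit per-collection setting wins; otherwise use the instance-wide default.
    static boolean isFilePIDsEnabledFor(Dataverse collection, boolean instanceWideDefault) {
        for (Dataverse dv = collection; dv != null; dv = dv.getOwner()) {
            Boolean explicit = dv.getFilePIDsEnabled();
            if (explicit != null) {
                return explicit;
            }
        }
        return instanceWideDefault;
    }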
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
index 46021ddbc9b..9f86a03639a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java
@@ -7,15 +7,15 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 import edu.harvard.iq.dataverse.validation.ValidateEmail;
 import org.hibernate.validator.constraints.NotBlank;
@@ -99,7 +99,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetFieldType)) {
+        if (!(object instanceof DataverseContact)) {
             return false;
         }
         DataverseContact other = (DataverseContact) object;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
index 7d09c300dde..d802117043b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseConverter.java
@@ -6,12 +6,12 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
index bfd465b8f54..83a2d8fdb8f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java
@@ -8,16 +8,16 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  *
@@ -93,7 +93,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetFieldType)) {
+        if (!(object instanceof DataverseFacet)) {
             return false;
         }
         DataverseFacet other = (DataverseFacet) object;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
index 67bf6a820e2..5c77989f6d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacetServiceBean.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.util.LruCache;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java
index 662ee74c3bf..d30d94cd034 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java
@@ -2,16 +2,16 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  *
@@ -85,7 +85,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetFieldType)) {
+        if (!(object instanceof DataverseFeaturedDataverse)) {
             return false;
         }
         DataverseFeaturedDataverse other = (DataverseFeaturedDataverse) object;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java
index 92b1ff7c2cf..a3425987bf8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevel.java
@@ -6,17 +6,17 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 /**
  *
@@ -30,8 +30,9 @@
     @NamedQuery(name = "DataverseFieldTypeInputLevel.findByDataverseIdDatasetFieldTypeId",
             query = "select f from DataverseFieldTypeInputLevel f where f.dataverse.id = :dataverseId and f.datasetFieldType.id = :datasetFieldTypeId"),
     @NamedQuery(name = "DataverseFieldTypeInputLevel.findByDataverseIdAndDatasetFieldTypeIdList",
-            query = "select f from DataverseFieldTypeInputLevel f where f.dataverse.id = :dataverseId and f.datasetFieldType.id in :datasetFieldIdList")
- 
+            query = "select f from DataverseFieldTypeInputLevel f where f.dataverse.id = :dataverseId and f.datasetFieldType.id in :datasetFieldIdList"),
+    @NamedQuery(name = "DataverseFieldTypeInputLevel.findRequiredByDataverseId",
+            query = "select f from DataverseFieldTypeInputLevel f where f.dataverse.id = :dataverseId and f.required = 'true' ")
 })
 @Table(name="DataverseFieldTypeInputLevel"
         ,  uniqueConstraints={
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java
index 42a1290fdbd..1bd290ecc4d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFieldTypeInputLevelServiceBean.java
@@ -7,13 +7,13 @@
 
 import edu.harvard.iq.dataverse.util.LruCache;
 import java.util.List;
-import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
@@ -88,6 +88,16 @@ public DataverseFieldTypeInputLevel findByDataverseIdDatasetFieldTypeId(Long dat
             return null;
         }         
     }
+    
+    public List<DataverseFieldTypeInputLevel> findRequiredByDataverseId(Long dataverseId) {
+        Query query = em.createNamedQuery("DataverseFieldTypeInputLevel.findRequiredByDataverseId", DataverseFieldTypeInputLevel.class);
+        query.setParameter("dataverseId", dataverseId);
+        try{
+            return query.getResultList();
+        } catch ( NoResultException nre ) {
+            return null;
+        }         
+    }
 
     public void delete(DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel) {
         em.remove(em.merge(dataverseFieldTypeInputLevel));
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java b/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java
index 1e1353a11fc..389b85c19d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseHeaderFragment.java
@@ -19,11 +19,11 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java
index 788308dce1e..3030922ea5e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingDataverse.java
@@ -7,18 +7,18 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java
index c823deddb64..834ff96e392 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseLinkingServiceBean.java
@@ -8,13 +8,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -66,7 +66,7 @@ public DataverseLinkingDataverse findDataverseLinkingDataverse(Long dataverseId,
                 .setParameter("dataverseId", dataverseId)
                 .setParameter("linkingDataverseId", linkingDataverseId)
                 .getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             logger.fine("No DataverseLinkingDataverse found for dataverseId " + dataverseId + " and linkedDataverseId " + linkingDataverseId);        
             return null;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java
index a2659b81974..c93144b2e97 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacet.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse;
 
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 import java.io.Serializable;
 import java.util.Objects;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
index b48ff725e1e..943a74327d5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
@@ -28,27 +28,26 @@
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
 import org.primefaces.model.DualListModel;
-import javax.ejb.EJBException;
-import javax.faces.event.ValueChangeEvent;
-import javax.faces.model.SelectItem;
+import jakarta.ejb.EJBException;
+import jakarta.faces.event.ValueChangeEvent;
+import jakarta.faces.model.SelectItem;
 import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.primefaces.PrimeFaces;
@@ -1287,4 +1286,7 @@ public String getCurationLabelSetNameLabel() {
         return setName;
     }
 
+    public Set<Entry<String, String>> getGuestbookEntryOptions() {
+        return settingsWrapper.getGuestbookEntryOptions(this.dataverse).entrySet();
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java
index e193b535412..58a3837dbf9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRequestServiceBean.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import javax.annotation.PostConstruct;
-import javax.enterprise.context.RequestScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.annotation.PostConstruct;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * The service bean to go to when one needs the current {@link DataverseRequest}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
index 9d09d0580e2..78d5eaf3414 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java
@@ -17,13 +17,13 @@
 import java.util.Set;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-//import javax.validation.constraints.NotNull;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+//import jakarta.validation.constraints.NotNull;
 
 /**
  *
@@ -303,7 +303,7 @@ public Set<DataverseRole> availableRoles(Long dvId) {
         Set<DataverseRole> roles = dv.getRoles();
         roles.addAll(findBuiltinRoles());
 
-        while (!dv.isEffectivelyPermissionRoot()) {
+        while (dv.getOwner() != null) {
             dv = dv.getOwner();
             roles.addAll(dv.getRoles());
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
index e092f209acd..10b5d800c21 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java
@@ -18,8 +18,11 @@
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
 import edu.harvard.iq.dataverse.search.SolrSearchResult;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.storageuse.StorageQuota;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import java.io.File;
 import java.io.IOException;
 import java.sql.Timestamp;
@@ -30,19 +33,27 @@
 import java.util.Map;
 import java.util.logging.Logger;
 import java.util.Properties;
-import java.util.concurrent.Future;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.solr.client.solrj.SolrServerException;
+import org.everit.json.schema.Schema;
+import org.everit.json.schema.ValidationException;
+import org.everit.json.schema.loader.SchemaLoader;
+import org.json.JSONObject;
+import org.json.JSONTokener;
 
 /**
  *
@@ -80,6 +91,9 @@ public class DataverseServiceBean implements java.io.Serializable {
     @EJB
     PermissionServiceBean permissionService;
     
+    @EJB
+    DataverseFieldTypeInputLevelServiceBean dataverseFieldTypeInputLevelService;
+    
     @EJB
     SystemConfig systemConfig;
 
@@ -346,51 +360,6 @@ public String getDataverseLogoThumbnailAsBase64ById(Long dvId) {
         } 
         return null;
     }
-    
-    /*
-    public boolean isDataverseLogoThumbnailAvailable(Dataverse dataverse, User user) {    
-        if (dataverse == null) {
-            return false; 
-        }
-                
-        // First, check if the dataverse has a defined logo: 
-        
-        //if (dataverse.getDataverseTheme() != null && dataverse.getDataverseTheme().getLogo() != null && !dataverse.getDataverseTheme().getLogo().equals("")) {
-            File dataverseLogoFile = getLogo(dataverse);
-            if (dataverseLogoFile != null) {
-                String logoThumbNailPath = null;
-
-                if (dataverseLogoFile.exists()) {
-                    logoThumbNailPath = ImageThumbConverter.generateImageThumbnailFromFile(dataverseLogoFile.getAbsolutePath(), 48);
-                    if (logoThumbNailPath != null) {
-                        return true;
-                    }
-                }
-            }
-        //}
-        */
-        // If there's no uploaded logo for this dataverse, go through its 
-        // [released] datasets and see if any of them have card images:
-        // 
-        // TODO:
-        // Discuss/Decide if we really want to do this - i.e., go through every
-        // file in every dataset below... 
-        // -- L.A. 4.0 beta14
-        /*
-        for (Dataset dataset : datasetService.findPublishedByOwnerId(dataverse.getId())) {
-            if (dataset != null) {
-                DatasetVersion releasedVersion = dataset.getReleasedVersion();
-                
-                if (releasedVersion != null) {
-                    if (datasetService.isDatasetCardImageAvailable(releasedVersion, user)) {
-                        return true;
-                    }
-                }
-            }
-        }   */     
-        /*
-        return false; 
-    } */
         
     private File getLogo(Dataverse dataverse) {
         if (dataverse.getId() == null) {
@@ -399,16 +368,7 @@ private File getLogo(Dataverse dataverse) {
         
         DataverseTheme theme = dataverse.getDataverseTheme(); 
         if (theme != null && theme.getLogo() != null && !theme.getLogo().isEmpty()) {
-            Properties p = System.getProperties();
-            String domainRoot = p.getProperty("com.sun.aas.instanceRoot");
-  
-            if (domainRoot != null && !"".equals(domainRoot)) {
-                return new File (domainRoot + File.separator + 
-                    "docroot" + File.separator + 
-                    "logos" + File.separator + 
-                    dataverse.getLogoOwnerId() + File.separator + 
-                    theme.getLogo());
-            }
+            return ThemeWidgetFragment.getLogoDir(dataverse.getLogoOwnerId()).resolve(theme.getLogo()).toFile();
         }
             
         return null;         
@@ -928,5 +888,294 @@ public List<Object[]> getDatasetTitlesWithinDataverse(Long dataverseId) {
         return em.createNativeQuery(cqString).getResultList();
     }
 
+        
+    public  String getCollectionDatasetSchema(String dataverseAlias) {
+        
+        Dataverse testDV = this.findByAlias(dataverseAlias);
+        
+        while (!testDV.isMetadataBlockRoot()) {
+            if (testDV.getOwner() == null) {
+                break; // we are at the root, which by definition is the metadata block root, regardless of the value
+            }
+            testDV = testDV.getOwner();
+        }
+        
+        /* Couldn't get the 'return the base schema if there are no extra required fields' case to work with the path provided;
+        leaving it as 'out of scope' for now. SEK 11/27/2023
+
+        List<DataverseFieldTypeInputLevel> required = new ArrayList<>();
+
+        required = dataverseFieldTypeInputLevelService.findRequiredByDataverseId(testDV.getId());
+        
+        if (required == null || required.isEmpty()){
+            String pathToJsonFile = "src/main/resources/edu/harvard/iq/dataverse/baseDatasetSchema.json";
+            String baseSchema = getBaseSchemaStringFromFile(pathToJsonFile);
+            if (baseSchema != null && !baseSchema.isEmpty()){
+                return baseSchema;
+            }
+        }
+        
+        */
+        List<MetadataBlock> selectedBlocks = new ArrayList<>();
+        List<DatasetFieldType> requiredDSFT = new ArrayList<>();
+        
+        selectedBlocks.addAll(testDV.getMetadataBlocks());
+
+        for (MetadataBlock mdb : selectedBlocks) {
+            for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
+                if (!dsft.isChild()) {
+                    DataverseFieldTypeInputLevel dsfIl = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(testDV.getId(), dsft.getId());
+                    if (dsfIl != null) {
+                        dsft.setRequiredDV(dsfIl.isRequired());
+                        dsft.setInclude(dsfIl.isInclude());
+                    } else {
+                        dsft.setRequiredDV(dsft.isRequired());
+                        dsft.setInclude(true);
+                    }
+                    if (dsft.isHasChildren()) {
+                        for (DatasetFieldType child : dsft.getChildDatasetFieldTypes()) {
+                            DataverseFieldTypeInputLevel dsfIlChild = dataverseFieldTypeInputLevelService.findByDataverseIdDatasetFieldTypeId(testDV.getId(), child.getId());
+                            if (dsfIlChild != null) {
+                                child.setRequiredDV(dsfIlChild.isRequired());
+                                child.setInclude(dsfIlChild.isInclude());
+                            } else {
+                                // in the case of conditionally required fields (child = true, parent = false),
+                                // we set this to false; i.e., this is the default "don't override" value
+                                child.setRequiredDV(child.isRequired() && dsft.isRequired());
+                                child.setInclude(true);
+                            }
+                        }
+                    }
+                    if(dsft.isRequiredDV()){
+                        requiredDSFT.add(dsft);
+                    }
+                }
+            }            
+
+        }
+        
+        String reqMDBNames = "";
+        List<MetadataBlock> hasReqFields = new ArrayList<>();
+        String retval = datasetSchemaPreface;
+        for (MetadataBlock mdb : selectedBlocks) {
+            for (DatasetFieldType dsft : requiredDSFT) {
+                if (dsft.getMetadataBlock().equals(mdb)) {
+                    hasReqFields.add(mdb);
+                    if (!reqMDBNames.isEmpty()) reqMDBNames += ",";
+                    reqMDBNames += "\"" + mdb.getName() + "\"";
+                    break;
+                }
+            }
+        }
+        int countMDB = 0;
+        for (MetadataBlock mdb : hasReqFields) {
+            if (countMDB>0){
+                retval += ",";
+            }
+            retval += getCustomMDBSchema(mdb, requiredDSFT);
+            countMDB++;            
+        }
+        
+        retval += "\n                     }";
+        
+        retval += endOfjson.replace("blockNames", reqMDBNames);
+
+        return retval;
+    
+    }    
+    
+    private String getCustomMDBSchema (MetadataBlock mdb, List<DatasetFieldType> requiredDSFT){
+        String retval = "";
+        boolean mdbHasReqField = false;
+        int numReq = 0;
+        List<DatasetFieldType> requiredThisMDB = new ArrayList<>();
+        
+        for (DatasetFieldType dsft : requiredDSFT ){
+
+            if(dsft.getMetadataBlock().equals(mdb)){
+                numReq++;
+                mdbHasReqField = true;
+                requiredThisMDB.add(dsft);
+            }
+        }
+        if (mdbHasReqField) {
+            retval += startOfMDB.replace("blockName", mdb.getName());
+
+            retval += minItemsTemplate.replace("numMinItems", Integer.toString(requiredThisMDB.size()));
+            int count = 0;
+            for (DatasetFieldType dsft : requiredThisMDB) {
+                count++;
+                String reqValImp = reqValTemplate.replace("reqFieldTypeName", dsft.getName());
+                if (count < requiredThisMDB.size()) {
+                    retval += reqValImp + "\n";
+                } else {
+                    reqValImp = StringUtils.substring(reqValImp, 0, reqValImp.length() - 1);
+                    retval += reqValImp + "\n";
+                    retval += endOfReqVal;
+                }
+            }
+
+        }
+        
+        return retval;
+    }
+    
+    public String isDatasetJsonValid(String dataverseAlias, String jsonInput) {
+        JSONObject rawSchema = new JSONObject(new JSONTokener(getCollectionDatasetSchema(dataverseAlias)));
+        
+        try {               
+            Schema schema = SchemaLoader.load(rawSchema);
+            schema.validate(new JSONObject(jsonInput)); // throws a ValidationException if this object is invalid
+        } catch (ValidationException vx) {
+            logger.info(BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " " + vx.getErrorMessage()); 
+            String accumulatedexceptions = "";
+            for (ValidationException va : vx.getCausingExceptions()){
+                accumulatedexceptions = accumulatedexceptions + va;
+                accumulatedexceptions = accumulatedexceptions.replace("org.everit.json.schema.ValidationException:", " ");
+            }
+            if (!accumulatedexceptions.isEmpty()){
+                return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " "  + accumulatedexceptions;
+            } else {
+                return BundleUtil.getStringFromBundle("dataverses.api.validate.json.failed") + " "  + vx.getErrorMessage();
+            }
+            
+        } catch (Exception ex) {            
+            logger.info(BundleUtil.getStringFromBundle("dataverses.api.validate.json.exception") + ex.getLocalizedMessage());
+            return BundleUtil.getStringFromBundle("dataverses.api.validate.json.exception") + ex.getLocalizedMessage();
+        } 
+
+        return BundleUtil.getStringFromBundle("dataverses.api.validate.json.succeeded");
+    }
+    
+    static String getBaseSchemaStringFromFile(String pathToJsonFile) {
+        File datasetSchemaJson = new File(pathToJsonFile);
+        try {
+            String datasetSchemaAsJson = new String(Files.readAllBytes(Paths.get(datasetSchemaJson.getAbsolutePath())));
+            return datasetSchemaAsJson;
+        } catch (IOException ex) {
+            logger.info("IO exception - failed to read the schema file - will build the schema on the fly. " + ex.getMessage());
+            return null;
+        } catch (Exception e){
+            logger.info("Other exception - failed to read the schema file - will build the schema on the fly. " + e.getMessage());
+            return null;
+        }
+    }
     
+    private  String datasetSchemaPreface = 
+    "{\n" +
+    "    \"$schema\": \"http://json-schema.org/draft-04/schema#\",\n" +
+    "    \"$defs\": {\n" +
+    "    \"field\": {\n" + 
+    "        \"type\": \"object\",\n" +
+    "        \"required\": [\"typeClass\", \"multiple\", \"typeName\"],\n" +
+    "        \"properties\": {\n" + 
+    "            \"value\": {\n" +
+    "                \"anyOf\": [\n" +
+    "                    {\n" +
+    "                        \"type\": \"array\"\n" +
+    "                    },\n" +
+    "                    {\n" + 
+    "                        \"type\": \"string\"\n" +
+    "                    },\n" +
+    "                    {\n" +
+    "                        \"$ref\": \"#/$defs/field\"\n" +
+    "                    }\n" + 
+    "                ]\n" + 
+    "            },\n" + 
+    "            \"typeClass\": {\n" +
+    "                \"type\": \"string\"\n" +
+    "            },\n" +
+    "            \"multiple\": {\n" +
+    "                \"type\": \"boolean\"\n" +
+    "            },\n" +
+    "            \"typeName\": {\n" + 
+    "                \"type\": \"string\"\n" +
+    "            }\n" +
+    "        }\n" +
+    "    }\n" + 
+    "},\n" + 
+    "\"type\": \"object\",\n" +
+    "\"properties\": {\n" + 
+    "    \"datasetVersion\": {\n" + 
+    "        \"type\": \"object\",\n" +
+    "        \"properties\": {\n" + 
+    "           \"license\": {\n" + 
+    "                \"type\": \"object\",\n" + 
+    "                \"properties\": {\n" + 
+    "                    \"name\": {\n" +
+    "                        \"type\": \"string\"\n" + 
+    "                    },\n" + 
+    "                    \"uri\": {\n" + 
+    "                        \"type\": \"string\",\n" + 
+    "                        \"format\": \"uri\"\n" + 
+    "                   }\n" + 
+    "                },\n" + 
+    "                \"required\": [\"name\", \"uri\"]\n" + 
+    "            },\n" + 
+    "            \"metadataBlocks\": {\n" + 
+    "                \"type\": \"object\",\n" + 
+    "               \"properties\": {\n" +
+    ""  ;
+    
+    private String startOfMDB = "" +
+"                           \"blockName\": {\n" +
+"                            \"type\": \"object\",\n" +
+"                            \"properties\": {\n" +
+"                                \"fields\": {\n" +
+"                                    \"type\": \"array\",\n" +
+"                                    \"items\": {\n" +
+"                                        \"$ref\": \"#/$defs/field\"\n" +
+"                                    },";
+    
+    private String reqValTemplate = "                                        {\n" +
+"                                            \"contains\": {\n" +
+"                                                \"properties\": {\n" +
+"                                                    \"typeName\": {\n" +
+"                                                        \"const\": \"reqFieldTypeName\"\n" +
+"                                                    }\n" +
+"                                                }\n" +
+"                                            }\n" +
+"                                        },";
+    
+    private String minItemsTemplate = "\n                                    \"minItems\": numMinItems,\n" +
+"                                    \"allOf\": [\n";
+    private String endOfReqVal = "                                    ]\n" +
+"                                }\n" +
+"                            },\n" +
+"                            \"required\": [\"fields\"]\n" +
+"                        }";
+    
+    private String endOfjson = ",\n" +
+"                    \"required\": [blockNames]\n" +
+"                }\n" +
+"            },\n" +
+"            \"required\": [\"metadataBlocks\"]\n" +
+"        }\n" +
+"    },\n" +
+"    \"required\": [\"datasetVersion\"]\n" +
+"}\n";
+    
+    public void saveStorageQuota(Dataverse target, Long allocation) {
+        StorageQuota storageQuota = target.getStorageQuota();
+        
+        if (storageQuota != null) {
+            storageQuota.setAllocation(allocation);
+            em.merge(storageQuota);
+        } else {
+            storageQuota = new StorageQuota(); 
+            storageQuota.setDefinitionPoint(target);
+            storageQuota.setAllocation(allocation);
+            target.setStorageQuota(storageQuota);
+            em.persist(storageQuota);
+        }
+        em.flush();
+    }
+    
+    public void disableStorageQuota(StorageQuota storageQuota) {
+        if (storageQuota != null && storageQuota.getAllocation() != null) {
+            storageQuota.setAllocation(null);
+            em.merge(storageQuota);
+            em.flush();
+        }
+    }
 }
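A minimal standalone sketch of the everit-json-schema validation pattern that isDatasetJsonValid() above relies on; the class name, method, and the two string arguments are hypothetical, but the SchemaLoader/Schema/ValidationException calls mirror the ones used in the new code.

import org.everit.json.schema.Schema;
import org.everit.json.schema.ValidationException;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;
import org.json.JSONTokener;

public class DatasetJsonValidationSketch {
    // Returns true if datasetJson satisfies schemaJson; prints each violation otherwise.
    public static boolean isValid(String schemaJson, String datasetJson) {
        Schema schema = SchemaLoader.load(new JSONObject(new JSONTokener(schemaJson)));
        try {
            schema.validate(new JSONObject(datasetJson)); // throws ValidationException on failure
            return true;
        } catch (ValidationException vx) {
            // Each causing exception describes one violated constraint in the document.
            vx.getCausingExceptions().forEach(cause -> System.err.println(cause.getMessage()));
            System.err.println(vx.getErrorMessage());
            return false;
        }
    }
}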
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
index c6016939c08..e8d76e1825e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseSession.java
@@ -18,13 +18,13 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java
index 0c6341db485..7f57d16b95a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java
@@ -8,16 +8,16 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
@@ -181,7 +181,7 @@ public int hashCode() {
 
     @Override
     public boolean equals(Object object) {
-        if (!(object instanceof DatasetFieldType)) {
+        if (!(object instanceof DataverseTheme)) {
             return false;
         }
         DataverseTheme other = (DataverseTheme) object;
diff --git a/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java b/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java
index ad48f15fc54..a2dc785c470 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DefaultValueSet.java
@@ -8,13 +8,13 @@
 
 import java.io.Serializable;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.OneToMany;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObject.java b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
index 09a2ef85893..cc5d7620969 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObject.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObject.java
@@ -1,14 +1,18 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+import edu.harvard.iq.dataverse.storageuse.StorageQuota;
+
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
-import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.*;
+import java.util.logging.Logger;
+
+import jakarta.persistence.*;
 
 /**
  * Base of the object hierarchy for "anything that can be inside a dataverse".
@@ -26,9 +30,13 @@
 			query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"),
     @NamedQuery(name = "DvObject.findByGlobalId",
             query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"),
+    @NamedQuery(name = "DvObject.findIdByGlobalId",
+            query = "SELECT o.id FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol and o.dtype=:dtype"),
 
     @NamedQuery(name = "DvObject.findByAlternativeGlobalId",
             query = "SELECT o FROM DvObject o, AlternativePersistentIdentifier a  WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"),
+    @NamedQuery(name = "DvObject.findIdByAlternativeGlobalId",
+            query = "SELECT o.id FROM DvObject o, AlternativePersistentIdentifier a  WHERE o.id = a.dvObject.id and a.identifier=:identifier and a.authority=:authority and a.protocol=:protocol and o.dtype=:dtype"),
 
     @NamedQuery(name = "DvObject.findByProtocolIdentifierAuthority",
             query = "SELECT o FROM DvObject o WHERE o.identifier=:identifier and o.authority=:authority and o.protocol=:protocol"),
@@ -51,10 +59,19 @@
 		uniqueConstraints = {@UniqueConstraint(columnNames = {"authority,protocol,identifier"}),@UniqueConstraint(columnNames = {"owner_id,storageidentifier"})})
 public abstract class DvObject extends DataverseEntity implements java.io.Serializable {
     
-    public static final String DATAVERSE_DTYPE_STRING = "Dataverse";
-    public static final String DATASET_DTYPE_STRING = "Dataset";
-    public static final String DATAFILE_DTYPE_STRING = "DataFile";
-    public static final List<String> DTYPE_LIST = Arrays.asList(DATAVERSE_DTYPE_STRING, DATASET_DTYPE_STRING, DATAFILE_DTYPE_STRING);
+    private static final Logger logger = Logger.getLogger(DvObject.class.getCanonicalName());
+    
+    public enum DType {
+        Dataverse("Dataverse"), Dataset("Dataset"), DataFile("DataFile");
+
+        String dtype;
+        DType(String dt) {
+            dtype = dt;
+        }
+        public String getDType() {
+            return dtype;
+        }
+    }
     
     public static final Visitor<String> NamePrinter = new Visitor<String>(){
 
@@ -139,6 +156,8 @@ public String visit(DataFile df) {
     private String identifier;
     
     private boolean identifierRegistered;
+        
+    private transient GlobalId globalId = null;
     
     @OneToMany(mappedBy = "dvObject", cascade = CascadeType.ALL, orphanRemoval = true)
     private Set<AlternativePersistentIdentifier> alternativePersistentIndentifiers;
@@ -159,6 +178,9 @@ public void setAlternativePersistentIndentifiers(Set<AlternativePersistentIdenti
      */
     private boolean previewImageAvailable;
     
+    @OneToOne(mappedBy = "definitionPoint",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true)
+    private StorageQuota storageQuota;
+    
     public boolean isPreviewImageAvailable() {
         return previewImageAvailable;
     }
@@ -166,7 +188,23 @@ public boolean isPreviewImageAvailable() {
     public void setPreviewImageAvailable(boolean status) {
         this.previewImageAvailable = status;
     }
+    
+    /**
+     * Indicates whether a previous attempt to generate a preview image has failed,
+     * regardless of size. This could be due to the file not being accessible, or a
+     * real failure in generating the thumbnail. In both cases, we won't want to try
+     * again every time the preview/thumbnail is requested for a view.
+     */
+    private boolean previewImageFail;
 
+    public boolean isPreviewImageFail() {
+        return previewImageFail;
+    }
+
+    public void setPreviewImageFail(boolean previewImageFail) {
+        this.previewImageFail = previewImageFail;
+    }
+    
     public Timestamp getModificationTime() {
         return modificationTime;
     }
@@ -272,6 +310,8 @@ public String getProtocol() {
 
     public void setProtocol(String protocol) {
         this.protocol = protocol;
+        //Remove cached value
+        globalId=null;
     }
 
     public String getAuthority() {
@@ -280,6 +320,8 @@ public String getAuthority() {
 
     public void setAuthority(String authority) {
         this.authority = authority;
+        //Remove cached value
+        globalId=null;
     }
 
     public Date getGlobalIdCreateTime() {
@@ -296,6 +338,8 @@ public String getIdentifier() {
 
     public void setIdentifier(String identifier) {
         this.identifier = identifier;
+        //Remove cached value
+        globalId=null;
     }
 
     public boolean isIdentifierRegistered() {
@@ -306,22 +350,13 @@ public void setIdentifierRegistered(boolean identifierRegistered) {
         this.identifierRegistered = identifierRegistered;
     }  
     
-    /**
-     * 
-     * @return This object's global id in a string form.
-     * @deprecated use {@code dvobj.getGlobalId().asString()}.
-     */
-    public String getGlobalIdString() {       
-        final GlobalId globalId = getGlobalId();
-        return globalId != null ? globalId.asString() : null;
-    }
-    
     public void setGlobalId( GlobalId pid ) {
         if ( pid == null ) {
             setProtocol(null);
             setAuthority(null);
             setIdentifier(null);
         } else {
+            //These reset globalId=null
             setProtocol(pid.getProtocol());
             setAuthority(pid.getAuthority());
             setIdentifier(pid.getIdentifier());
@@ -329,10 +364,11 @@ public void setGlobalId( GlobalId pid ) {
     }
     
     public GlobalId getGlobalId() {
-        // FIXME should return NULL when the fields are null. Currenntly, 
-        //       a lot of code depends call this method, so this fix can't be 
-        //       a part of the current PR.
-        return new GlobalId(getProtocol(), getAuthority(), getIdentifier());
+        // Cache this
+        if ((globalId == null) && !(getProtocol() == null || getAuthority() == null || getIdentifier() == null)) {
+            globalId = PidUtil.parseAsGlobalID(getProtocol(), getAuthority(), getIdentifier());
+        }
+        return globalId;
     }
     
     public abstract <T> T accept(Visitor<T> v);
@@ -420,17 +456,7 @@ public String getAuthorString(){
     }
     
     public String getTargetUrl(){
-        if (this instanceof Dataverse){
-            throw new UnsupportedOperationException("Not supported yet.");
-        }
-        if (this instanceof Dataset){
-            return Dataset.TARGET_URL;
-        }
-        if (this instanceof DataFile){
-            return DataFile.TARGET_URL;
-        }
         throw new UnsupportedOperationException("Not supported yet. New DVObject Instance?");
-        
     }
     
     public String getYearPublishedCreated(){
@@ -452,6 +478,14 @@ public void setStorageIdentifier(String storageIdentifier) {
         this.storageIdentifier = storageIdentifier;
     }
     
+    public StorageQuota getStorageQuota() {
+        return storageQuota;
+    }
+    
+    public void setStorageQuota(StorageQuota storageQuota) {
+        this.storageQuota = storageQuota;
+    }
+
     /**
      * 
      * @param other 
@@ -459,6 +493,8 @@ public void setStorageIdentifier(String storageIdentifier) {
      */
     public abstract boolean isAncestorOf( DvObject other );
     
+
     @OneToMany(mappedBy = "definitionPoint",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true)
     List<RoleAssignment> roleAssignments;
+    
 }
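A small sketch (hypothetical class) of the cache-and-invalidate idiom that the new transient globalId field above follows: the derived value is parsed lazily on first read, and every setter for one of its inputs clears the cache.

public class CachedPidHolder {
    private String protocol;
    private String authority;
    private String identifier;
    private transient String cachedPid; // derived value, never persisted

    public void setProtocol(String protocol) { this.protocol = protocol; cachedPid = null; }
    public void setAuthority(String authority) { this.authority = authority; cachedPid = null; }
    public void setIdentifier(String identifier) { this.identifier = identifier; cachedPid = null; }

    public String getPid() {
        // Rebuild only when all components are present and no cached value exists.
        if (cachedPid == null && protocol != null && authority != null && identifier != null) {
            cachedPid = protocol + ":" + authority + "/" + identifier;
        }
        return cachedPid;
    }
}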
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
index 6ff01ef3ea8..82057315fbb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectContainer.java
@@ -1,9 +1,14 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.storageuse.StorageUse;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.util.Locale;
-import javax.persistence.MappedSuperclass;
+import jakarta.persistence.CascadeType;
+import java.util.Optional;
+
+import jakarta.persistence.MappedSuperclass;
+import jakarta.persistence.OneToOne;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -13,10 +18,8 @@
  */
 @MappedSuperclass
 public abstract class DvObjectContainer extends DvObject {
-	
-    
-    public static final String UNDEFINED_METADATA_LANGUAGE_CODE = "undefined"; //Used in dataverse.xhtml as a non-null selection option value (indicating inheriting the default)
     
+    public static final String UNDEFINED_CODE = "undefined"; //Used in dataverse.xhtml as a non-null selection option value (indicating inheriting the default)
     
     public void setOwner(Dataverse owner) {
         super.setOwner(owner);
@@ -38,6 +41,11 @@ public boolean isEffectivelyPermissionRoot() {
     
     private String metadataLanguage=null;
     
+    private Boolean guestbookAtRequest = null;
+   
+    @OneToOne(mappedBy = "dvObjectContainer",cascade={ CascadeType.REMOVE, CascadeType.PERSIST}, orphanRemoval=true)
+    private StorageUse storageUse;
+    
     public String getEffectiveStorageDriverId() {
         String id = storageDriver;
         if (StringUtils.isBlank(id)) {
@@ -71,7 +79,7 @@ public String getEffectiveMetadataLanguage() {
             if (this.getOwner() != null) {
                 ml = this.getOwner().getEffectiveMetadataLanguage();
             } else {
-                ml = UNDEFINED_METADATA_LANGUAGE_CODE;
+                ml = UNDEFINED_CODE;
             }
         }
         return ml;
@@ -79,13 +87,13 @@ public String getEffectiveMetadataLanguage() {
     
     public String getMetadataLanguage() {
         if (metadataLanguage == null) {
-            return UNDEFINED_METADATA_LANGUAGE_CODE;
+            return UNDEFINED_CODE;
         }
         return metadataLanguage;
     }
 
     public void setMetadataLanguage(String ml) {
-        if (ml != null && ml.equals(UNDEFINED_METADATA_LANGUAGE_CODE)) {
+        if (ml != null && ml.equals(UNDEFINED_CODE)) {
             this.metadataLanguage = null;
         } else {
             this.metadataLanguage = ml;
@@ -93,7 +101,40 @@ public void setMetadataLanguage(String ml) {
     }
     
     public static boolean isMetadataLanguageSet(String mdLang) {
-        return mdLang!=null && !mdLang.equals(UNDEFINED_METADATA_LANGUAGE_CODE);
+        return mdLang!=null && !mdLang.equals(UNDEFINED_CODE);
+    }
+    
+    public boolean getEffectiveGuestbookEntryAtRequest() {
+        boolean gbAtRequest = false;
+        if (guestbookAtRequest==null) {
+            if (this.getOwner() != null) {
+                gbAtRequest = this.getOwner().getEffectiveGuestbookEntryAtRequest();
+            } else {
+                Optional<Boolean> opt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
+                if (opt.isPresent()) {
+                    gbAtRequest = opt.get();
+                }
+            }
+        } else {
+            gbAtRequest = guestbookAtRequest;
+        }
+        return gbAtRequest;
+    }
+    
+    public String getGuestbookEntryAtRequest() {
+        if(guestbookAtRequest==null) {
+            return UNDEFINED_CODE;
+        }
+        return Boolean.valueOf(guestbookAtRequest).toString();
+    }
+
+    public void setGuestbookEntryAtRequest(String gbAtRequest) {
+        if (gbAtRequest == null || gbAtRequest.equals(UNDEFINED_CODE)) {
+            this.guestbookAtRequest = null;
+        } else {
+            //Force to true or false
+            this.guestbookAtRequest = Boolean.valueOf(Boolean.parseBoolean(gbAtRequest));
+        }
     }
     
 
@@ -124,5 +165,14 @@ public String getCurationLabelSetName() {
     public void setCurationLabelSetName(String setName) {
         this.externalLabelSetName = setName;
     }
-
+    
+    /**
+     * Should only be used in constructors for DvObjectContainers (Datasets and 
+     * Collections), to make sure new entries are created and persisted in the 
+     * database StorageUse table for every DvObject container we create.
+     * @param storageUse 
+     */
+    public void setStorageUse(StorageUse storageUse) {
+        this.storageUse = storageUse;
+    }
 }
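A minimal sketch (hypothetical class) of the tri-state inheritance pattern used for guestbookAtRequest above: a null local value means "inherit", so the effective value is resolved by walking up the owner chain and only falling back to a global default at the root.

public class InheritedFlag {
    private static final boolean GLOBAL_DEFAULT = false; // stands in for the JVM setting lookup
    private final InheritedFlag owner; // parent container; null at the root
    private Boolean localValue;        // null = not set here, inherit from the owner

    public InheritedFlag(InheritedFlag owner) {
        this.owner = owner;
    }

    public void setLocalValue(Boolean value) {
        this.localValue = value;
    }

    public boolean getEffectiveValue() {
        if (localValue != null) {
            return localValue;
        }
        return owner != null ? owner.getEffectiveValue() : GLOBAL_DEFAULT;
    }
}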
diff --git a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
index 01b0890d588..d4219c36149 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DvObjectServiceBean.java
@@ -1,6 +1,8 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Date;
@@ -10,17 +12,18 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+
 import org.apache.commons.lang3.StringUtils;
-import org.ocpsoft.common.util.Strings;
 
 /**
  * Your goto bean for everything {@link DvObject}, that's not tied to any
@@ -79,46 +82,108 @@ public boolean checkExists(Long id) {
         Long result =(Long)query.getSingleResult();
         return result > 0;
     }   
-    // FIXME This type-by-string has to go, in favor of passing a class parameter.
-    public DvObject findByGlobalId(String globalIdString, String typeString) {
-        return findByGlobalId(globalIdString, typeString, false);
+
+    public DvObject findByGlobalId(String globalIdString, DvObject.DType dtype) {
+        try {
+            GlobalId gid = PidUtil.parseAsGlobalID(globalIdString);
+            return findByGlobalId(gid, dtype);
+        } catch (IllegalArgumentException iae) {
+            logger.fine("Invalid identifier: " + globalIdString);
+            return null;
+        }
+
     }
     
-        // FIXME This type-by-string has to go, in favor of passing a class parameter.
-    public DvObject findByGlobalId(String globalIdString, String typeString, Boolean altId) {
-
+    public DvObject findByAltGlobalId(String globalIdString, DvObject.DType dtype) {
         try {
-            GlobalId gid = new GlobalId(globalIdString);
+            GlobalId gid = PidUtil.parseAsGlobalID(globalIdString);
+            return findByAltGlobalId(gid, dtype);
+        } catch (IllegalArgumentException iae) {
+            logger.fine("Invalid alternate identifier: " + globalIdString);
+            return null;
+        }
 
-            DvObject foundDvObject = null;
-            try {
-                Query query;                                
-                if (altId) {
-                   query = em.createNamedQuery("DvObject.findByAlternativeGlobalId"); 
-                } else{
-                   query = em.createNamedQuery("DvObject.findByGlobalId");
-                }
-                query.setParameter("identifier", gid.getIdentifier());
-                query.setParameter("protocol", gid.getProtocol());
-                query.setParameter("authority", gid.getAuthority());
-                query.setParameter("dtype", typeString);
-                foundDvObject = (DvObject) query.getSingleResult();
-            } catch (javax.persistence.NoResultException e) {
-                // (set to .info, this can fill the log file with thousands of
-                // these messages during a large harvest run)
-                logger.fine("no dvObject found: " + globalIdString);
-                // DO nothing, just return null.
-                return null;
-            } catch (Exception ex) {
-                logger.info("Exception caught in findByGlobalId: " + ex.getLocalizedMessage());
-                return null;
-            }
-            return foundDvObject;
+    }
 
-        } catch (IllegalArgumentException iae) {
-            logger.info("Invalid identifier: " + globalIdString);
+    public DvObject findByGlobalId(GlobalId globalId, DvObject.DType dtype) {
+        Query query = em.createNamedQuery("DvObject.findByGlobalId");
+        return runFindByGlobalId(query, globalId, dtype);
+    }
+
+    public DvObject findByAltGlobalId(GlobalId globalId, DvObject.DType dtype) {
+        Query query = em.createNamedQuery("DvObject.findByAlternativeGlobalId");
+        return runFindByGlobalId(query, globalId, dtype);
+    }
+
+    public Long findIdByGlobalId(GlobalId globalId, DvObject.DType dtype) {
+        Query query = em.createNamedQuery("DvObject.findIdByGlobalId");
+        return runFindIdByGlobalId(query, globalId, dtype);
+    }
+
+    public Long findIdByAltGlobalId(GlobalId globalId, DvObject.DType dtype) {
+        Query query = em.createNamedQuery("DvObject.findIdByAlternativeGlobalId");
+        return runFindIdByGlobalId(query, globalId, dtype);
+    }
+
+    private DvObject runFindByGlobalId(Query query, GlobalId gid, DvObject.DType dtype) {
+        DvObject foundDvObject = null;
+        try {
+            query.setParameter("identifier", gid.getIdentifier());
+            query.setParameter("protocol", gid.getProtocol());
+            query.setParameter("authority", gid.getAuthority());
+            query.setParameter("dtype", dtype.getDType());
+            foundDvObject = (DvObject) query.getSingleResult();
+        } catch (NoResultException e) {
+            // (set to .fine; at .info this could fill the log file with thousands of
+            // these messages during a large harvest run)
+            logger.fine("no dvObject found: " + gid.asString());
+            // DO nothing, just return null.
+            return null;
+        } catch (Exception ex) {
+            logger.info("Exception caught in findByGlobalId: " + ex.getLocalizedMessage());
+            return null;
+        }
+        return foundDvObject;
+    }
+
+    private Long runFindIdByGlobalId(Query query, GlobalId gid, DvObject.DType dtype) {
+        Long foundDvObject = null;
+        try {
+            query.setParameter("identifier", gid.getIdentifier());
+            query.setParameter("protocol", gid.getProtocol());
+            query.setParameter("authority", gid.getAuthority());
+            query.setParameter("dtype", dtype.getDType());
+            foundDvObject = (Long) query.getSingleResult();
+        } catch (NoResultException e) {
+            // (set to .fine; at .info this could fill the log file with thousands of
+            // these messages during a large harvest run)
+            logger.fine("no dvObject found: " + gid.asString());
+            // DO nothing, just return null.
+            return null;
+        } catch (Exception ex) {
+            logger.info("Exception caught in runFindIdByGlobalId: " + ex.getLocalizedMessage());
             return null;
         }
+        return foundDvObject;
+    }
+    
+    public DvObject findByGlobalId(GlobalId globalId) {
+        try {
+            return (DvObject) em.createNamedQuery("DvObject.findByProtocolIdentifierAuthority")
+                    .setParameter("identifier", globalId.getIdentifier())
+                    .setParameter("authority", globalId.getAuthority()).setParameter("protocol", globalId.getProtocol())
+                    .getSingleResult();
+        } catch (NoResultException nre) {
+            return null;
+        }
+    }
+    
+    public boolean isGlobalIdLocallyUnique(GlobalId globalId) {
+        return em.createNamedQuery("DvObject.findByProtocolIdentifierAuthority")
+            .setParameter("identifier", globalId.getIdentifier())
+            .setParameter("authority", globalId.getAuthority())
+            .setParameter("protocol", globalId.getProtocol())
+            .getResultList().isEmpty();
     }
 
     public DvObject updateContentIndexTime(DvObject dvObject) {
@@ -257,7 +322,7 @@ public Map<Long, String> getObjectPathsByIds(Set<Long> objectIds){
             return null;
         }
         
-        String datasetIdStr = Strings.join(objectIds, ", ");
+        String datasetIdStr = StringUtils.join(objectIds, ", ");
         
         String qstr = "WITH RECURSIVE path_elements AS ((" +
             " SELECT id, owner_id FROM dvobject WHERE id in (" + datasetIdStr + "))" +
@@ -317,4 +382,11 @@ public Map<Long, String> getObjectPathsByIds(Set<Long> objectIds){
         }
         return ret;        
     }
+    
+    public String generateNewIdentifierByStoredProcedure() {
+        StoredProcedureQuery query = this.em.createNamedStoredProcedureQuery("Dataset.generateIdentifierFromStoredProcedure");
+        query.execute();
+        return (String) query.getOutputParameterValue(1);
+    }
+    
 }
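A hypothetical caller sketch for the new enum-typed lookups above (the bean name and the example method are illustrative only); it assumes the class lives in the same edu.harvard.iq.dataverse package, so DvObject and DvObjectServiceBean need no extra imports.

import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;

@Stateless
public class PidLookupExample {
    @EJB
    DvObjectServiceBean dvObjectService;

    public DvObject findDatasetByPid(String pid) {
        // Look the object up by its primary persistent identifier first,
        // then fall back to an alternative (older) PID if nothing was found.
        DvObject result = dvObjectService.findByGlobalId(pid, DvObject.DType.Dataset);
        if (result == null) {
            result = dvObjectService.findByAltGlobalId(pid, DvObject.DType.Dataset);
        }
        return result;
    }
}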
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java b/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java
index 1bf6bee82eb..883baeedef4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDataFilesPageHelper.java
@@ -4,8 +4,8 @@
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.apache.commons.text.StringEscapeUtils;
 
-import javax.ejb.Stateless;
-import javax.inject.Inject;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index 1c033b37872..a6f31e24764 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -28,6 +28,7 @@
 import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand;
 import edu.harvard.iq.dataverse.ingest.IngestRequest;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.ingest.IngestUtil;
@@ -36,10 +37,10 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.Setting;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.WebloaderUtil;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.EjbUtil;
@@ -58,23 +59,23 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.primefaces.event.FileUploadEvent;
 import org.primefaces.model.file.UploadedFile;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonArray;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonReader;
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.httpclient.methods.GetMethod;
@@ -82,10 +83,10 @@
 import java.util.Collection;
 import java.util.Set;
 import java.util.logging.Level;
-import javax.faces.event.AjaxBehaviorEvent;
-import javax.faces.event.FacesEvent;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.faces.event.FacesEvent;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.primefaces.PrimeFaces;
@@ -187,7 +188,13 @@ public enum Referrer {
     // Used to store results of permissions checks
     private final Map<String, Boolean> datasetPermissionMap = new HashMap<>(); // { Permission human_name : Boolean }
 
+    // Size limit of an individual file: (set for the storage volume used)
     private Long maxFileUploadSizeInBytes = null;
+    // Total amount of data that the user should be allowed to upload.
+    // Calculated in real time from the quotas defined at various levels - 
+    // for this user and/or this collection/dataset, etc. - and it may 
+    // change during the user session.
+    private Long maxTotalUploadSizeInBytes = null;
     private Long maxIngestSizeInBytes = null;
     // CSV: 4.8 MB, DTA: 976.6 KB, XLSX: 5.7 MB, etc.
     private String humanPerFormatTabularLimits = null;
@@ -199,6 +206,7 @@ public enum Referrer {
     private final int NUMBER_OF_SCROLL_ROWS = 25;
 
     private DataFile singleFile = null;
+    private UploadSessionQuotaLimit uploadSessionQuota = null; 
 
     public DataFile getSingleFile() {
         return singleFile;
@@ -341,6 +349,18 @@ public boolean isUnlimitedUploadFileSize() {
 
         return this.maxFileUploadSizeInBytes == null;
     }
+    
+    public Long getMaxTotalUploadSizeInBytes() {
+        return maxTotalUploadSizeInBytes;
+    }
+    
+    public String getHumanMaxTotalUploadSizeInBytes() {
+        return FileSizeChecker.bytesToHumanReadable(maxTotalUploadSizeInBytes);
+    }
+    
+    public boolean isStorageQuotaEnforced() {
+        return uploadSessionQuota != null; 
+    }
 
     public Long getMaxIngestSizeInBytes() {
         return maxIngestSizeInBytes;
@@ -509,15 +529,28 @@ public String initCreateMode(String modeToken, DatasetVersion version, MutableBo
         selectedFiles = selectedFileMetadatasList;
 
         this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
+        if (systemConfig.isStorageQuotasEnforced()) {
+            this.uploadSessionQuota = datafileService.getUploadSessionQuotaLimit(dataset);
+            if (this.uploadSessionQuota != null) {
+                this.maxTotalUploadSizeInBytes = uploadSessionQuota.getRemainingQuotaInBytes();
+            }
+        } else {
+            this.maxTotalUploadSizeInBytes = null; 
+        }
         this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit();
         this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits();
         this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();
-
+        
         logger.fine("done");
 
         saveEnabled = true;
+        
         return null;
     }
+    
+    public boolean isQuotaExceeded() {
+        return systemConfig.isStorageQuotasEnforced() && uploadSessionQuota != null && uploadSessionQuota.getRemainingQuotaInBytes() == 0;
+    }
 
     public String init() {
         // default mode should be EDIT
@@ -560,10 +593,15 @@ public String init() {
         
         clone = workingVersion.cloneDatasetVersion();
         this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
+        if (systemConfig.isStorageQuotasEnforced()) {
+            this.uploadSessionQuota = datafileService.getUploadSessionQuotaLimit(dataset);
+            if (this.uploadSessionQuota != null) {
+                this.maxTotalUploadSizeInBytes = uploadSessionQuota.getRemainingQuotaInBytes();
+            }
+        }
         this.maxIngestSizeInBytes = systemConfig.getTabularIngestSizeLimit();
         this.humanPerFormatTabularLimits = populateHumanPerFormatTabularLimits();
         this.multipleUploadFilesLimit = systemConfig.getMultipleUploadFilesLimit();        
-        this.maxFileUploadSizeInBytes = systemConfig.getMaxFileUploadSizeForStore(dataset.getEffectiveStorageDriverId());
         
         hasValidTermsOfAccess = isHasValidTermsOfAccess();
         if (!hasValidTermsOfAccess) {
@@ -657,7 +695,7 @@ public String init() {
         if (isHasPublicStore()){
             JH.addMessage(FacesMessage.SEVERITY_WARN, getBundleString("dataset.message.label.fileAccess"), getBundleString("dataset.message.publicInstall"));
         }
-
+       
         return null;
     }
 
@@ -1064,7 +1102,7 @@ public String save() {
             }
 
             // Try to save the NEW files permanently: 
-            List<DataFile> filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, null, true);
+            List<DataFile> filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, null, true); 
             
             // reset the working list of fileMetadatas, as to only include the ones
             // that have been added to the version successfully: 
@@ -1494,14 +1532,16 @@ public void handleDropBoxUpload(ActionEvent event) {
                 // for example, multiple files can be extracted from an uncompressed
                 // zip file.
                 //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream");
-                CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig);
+                //CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream", null, null, systemConfig);
+                Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, dropBoxStream, fileName, "application/octet-stream", null, uploadSessionQuota, null);
+                CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
                 datafiles = createDataFilesResult.getDataFiles();
                 Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage));
 
-            } catch (IOException ex) {
+            } catch (CommandException ex) {
                 this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}", new Object[]{fileName, fileLink});
                 continue;
-            }/*catch (FileExceedsMaxSizeException ex){
+            } /*catch (FileExceedsMaxSizeException ex){
                 this.logger.log(Level.SEVERE, "Error during ingest of DropBox file {0} from link {1}: {2}", new Object[]{fileName, fileLink, ex.getMessage()});
                 continue;
             }*/ finally {
@@ -2024,7 +2064,21 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
             // Note: A single uploaded file may produce multiple datafiles - 
             // for example, multiple files can be extracted from an uncompressed
             // zip file.
-            CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);
+            ///CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, null, systemConfig);
+            
+            Command<CreateDataFileResult> cmd;
+            if (mode == FileEditMode.CREATE) {
+                // This is a file upload in the context of creating a brand new
+                // dataset that does not yet exist in the database. We must 
+                // use the version of the CreateNewDataFilesCommand constructor that takes
+                // the parent Dataverse as the extra argument:
+                cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, uploadSessionQuota, null, null, null, workingVersion.getDataset().getOwner());
+            } else {
+                cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, uFile.getInputStream(), uFile.getFileName(), uFile.getContentType(), null, uploadSessionQuota, null);
+            }
+            CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
+
+        
             dFileList = createDataFilesResult.getDataFiles();
             String createDataFilesError = editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult);
             if(createDataFilesError != null) {
@@ -2033,8 +2087,14 @@ public void handleFileUpload(FileUploadEvent event) throws IOException {
             }
 
         } catch (IOException ioex) {
+            // shouldn't we try and communicate to the user what happened?
             logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ioex.getMessage());
             return;
+        } catch (CommandException cex) {
+            // shouldn't we try and communicate to the user what happened?
+            errorMessages.add(cex.getMessage());
+            uploadComponentId = event.getComponent().getClientId();
+            return;
         }
         /*catch (FileExceedsMaxSizeException ex) {
             logger.warning("Failed to process and/or save the file " + uFile.getFileName() + "; " + ex.getMessage());
@@ -2112,6 +2172,11 @@ public void handleExternalUpload() {
                 - Max size NOT specified in db: default is unlimited
                 - Max size specified in db: check too make sure file is within limits
             // ---------------------------- */
+            /**
+             * @todo: this file size limit check is now redundant here, since the new
+             * CreateNewDataFilesCommand is going to perform it (and the quota 
+             * checks too, if enabled).
+             */
             if ((!this.isUnlimitedUploadFileSize()) && (fileSize > this.getMaxFileUploadSizeInBytes())) {
                 String warningMessage = "Uploaded file \"" + fileName + "\" exceeded the limit of " + fileSize + " bytes and was not uploaded.";
                 sio.delete();
@@ -2131,18 +2196,27 @@ public void handleExternalUpload() {
                 List<DataFile> datafiles = new ArrayList<>();
 
                 // -----------------------------------------------------------
-                // Send it through the ingest service
+                // Execute the CreateNewDataFiles command:
                 // -----------------------------------------------------------
+                
+                Dataverse parent = null; 
+                
+                if (mode == FileEditMode.CREATE) {
+                    // This is a file upload in the context of creating a brand new
+                    // dataset that does not yet exist in the database. We must 
+                    // pass the parent Dataverse to the CreateNewDataFilesCommand
+                    // constructor. The RequiredPermission on the command in this 
+                    // scenario = Permission.AddDataset on the parent dataverse.
+                    parent = workingVersion.getDataset().getOwner();
+                }
+                
                 try {
-
-                    // Note: A single uploaded file may produce multiple datafiles - 
-                    // for example, multiple files can be extracted from an uncompressed
-                    // zip file.
-                    //datafiles = ingestService.createDataFiles(workingVersion, dropBoxStream, fileName, "application/octet-stream");
-                    CreateDataFileResult createDataFilesResult = FileUtil.createDataFiles(workingVersion, null, fileName, contentType, fullStorageIdentifier, checksumValue, checksumType, systemConfig);
+  
+                    Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequestService.getDataverseRequest(), workingVersion, null, fileName, contentType, fullStorageIdentifier, uploadSessionQuota, checksumValue, checksumType, fileSize, parent);
+                    CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
                     datafiles = createDataFilesResult.getDataFiles();
                     Optional.ofNullable(editDataFilesPageHelper.getHtmlErrorMessage(createDataFilesResult)).ifPresent(errorMessage -> errorMessages.add(errorMessage));
-                } catch (IOException ex) {
+                } catch (CommandException ex) {
                     logger.log(Level.SEVERE, "Error during ingest of file {0}", new Object[]{fileName});
                 }
 
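A condensed sketch (hypothetical helper method, reusing the page's injected beans and fields shown above) of the new quota-aware upload path: file creation now goes through CreateNewDataFilesCommand, which enforces the per-file size limit and, when enabled, the session storage quota, instead of calling FileUtil.createDataFiles() directly.

private List<DataFile> createFilesViaCommand(InputStream inputStream, String fileName, String contentType) {
    try {
        Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(
                dvRequestService.getDataverseRequest(), workingVersion,
                inputStream, fileName, contentType,
                null,                 // no pre-assigned storage identifier
                uploadSessionQuota,   // null unless storage quotas are enforced
                null);                // no externally supplied checksum
        return commandEngine.submit(cmd).getDataFiles();
    } catch (CommandException cex) {
        // Size and quota violations surface as CommandExceptions; report them to the user.
        errorMessages.add(cex.getMessage());
        return new ArrayList<>();
    }
}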
diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index b4efe7ec41d..5a689c06019 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -18,19 +18,20 @@
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.search.IndexBatchServiceBean;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -40,16 +41,16 @@
 import java.util.Stack;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.Resource;
-import javax.ejb.EJBContext;
-import javax.ejb.EJBException;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import static javax.ejb.TransactionAttributeType.SUPPORTS;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJBContext;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import static jakarta.ejb.TransactionAttributeType.SUPPORTS;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 
 /**
  * An EJB capable of executing {@link Command}s in a JEE environment.
@@ -124,6 +125,9 @@ public class EjbDataverseEngine {
     @EJB
     HandlenetServiceBean handleNet;
     
+    @EJB
+    PermaLinkPidProviderServiceBean permaLinkProvider;
+    
     @EJB
     SettingsServiceBean settings;
     
@@ -181,6 +185,9 @@ public class EjbDataverseEngine {
     @EJB
     ConfirmEmailServiceBean confirmEmailService;
     
+    @EJB
+    StorageUseServiceBean storageUseService; 
+    
     @EJB
     EjbDataverseEngineInner innerEngine;
     
@@ -496,6 +503,11 @@ public HandlenetServiceBean handleNet() {
                     return handleNet;
                 }
 
+                @Override
+                public PermaLinkPidProviderServiceBean permaLinkProvider() {
+                    return permaLinkProvider;
+                }
+                
                 @Override
                 public SettingsServiceBean settings() {
                     return settings;
@@ -520,6 +532,12 @@ public DataverseLinkingServiceBean dvLinking() {
                 public DatasetLinkingServiceBean dsLinking() {
                     return dsLinking;
                 }
+                
+                @Override
+                public StorageUseServiceBean storageUse() {
+                    return storageUseService;
+                }
+                
                 @Override
                 public DataverseEngine engine() {
                     return new DataverseEngine() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java
index d8339dce856..891fe91dc66 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngineInner.java
@@ -4,13 +4,13 @@
 import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import javax.annotation.Resource;
-import javax.ejb.EJBContext;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRED;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJBContext;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRED;
 
-import javax.inject.Named;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/Embargo.java b/src/main/java/edu/harvard/iq/dataverse/Embargo.java
index eac83edd296..29959b9f2d4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Embargo.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Embargo.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse;
 
 
-import javax.persistence.*;
+import jakarta.persistence.*;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java
index afbeab404c7..d0a8d214959 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EmbargoServiceBean.java
@@ -3,12 +3,12 @@
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 import java.util.List;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java b/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java
index 3618da79630..7ebfa0302ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ExternalVocabularyValue.java
@@ -9,13 +9,13 @@
 import java.io.Serializable;
 import java.sql.Timestamp;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java b/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java
index 75ef62200bf..fd41315dbc0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FacetConverter.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java
index e7362587c36..d4d701cb02f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FeaturedDataverseServiceBean.java
@@ -9,11 +9,11 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
new file mode 100644
index 00000000000..51c67a37a09
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequest.java
@@ -0,0 +1,200 @@
+package edu.harvard.iq.dataverse;
+
+import java.io.Serializable;
+import java.util.Date;
+
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+
+/**
+ *
+ * @author Marina
+ */
+
+@Entity
+@Table(name = "fileaccessrequests")
+
+@NamedQueries({
+        @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserId",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId"),
+        @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseId",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId"),
+        @NamedQuery(name = "FileAccessRequest.findByDataFileId",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId"),
+        @NamedQuery(name = "FileAccessRequest.findByRequestState",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.requestState=:requestState"),
+        @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndRequestState",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.requestState=:requestState"),
+        @NamedQuery(name = "FileAccessRequest.findByGuestbookResponseIdAndRequestState",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.guestbookResponse.id=:guestbookResponseId and far.requestState=:requestState"),
+        @NamedQuery(name = "FileAccessRequest.findByDataFileIdAndRequestState",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.dataFile.id=:dataFileId and far.requestState=:requestState"),
+        @NamedQuery(name = "FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState",
+                query = "SELECT far FROM FileAccessRequest far WHERE far.user.id=:authenticatedUserId and far.dataFile.id=:dataFileId and far.requestState=:requestState")
+})
+
+
+public class FileAccessRequest implements Serializable{
+    private static final long serialVersionUID = 1L;
+    @Id
+    @GeneratedValue(strategy = GenerationType.IDENTITY)
+    private Long id;
+
+    @ManyToOne
+    @JoinColumn(nullable=false)
+    private DataFile dataFile;
+    
+    @ManyToOne
+    @JoinColumn(name="authenticated_user_id",nullable=false)
+    private AuthenticatedUser user;
+    
+    @OneToOne
+    @JoinColumn(nullable=true)
+    private GuestbookResponse guestbookResponse;
+    
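+    /** Request lifecycle: a request starts as CREATED and is later marked GRANTED or REJECTED. */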
+    public enum RequestState {CREATED, GRANTED, REJECTED};
+    //private RequestState state;
+    @Enumerated(EnumType.STRING)
+    @Column(name="request_state", nullable=false )
+    private RequestState requestState;
+    
+    @Temporal(value = TemporalType.TIMESTAMP)
+    @Column(name = "creation_time")
+    private Date creationTime;
+    
+    public FileAccessRequest(){
+    }
+    
+    public FileAccessRequest(DataFile df, AuthenticatedUser au){
+        setDataFile(df);
+        setRequester(au);
+        setState(RequestState.CREATED);
+        setCreationTime(new Date());
+    }
+    
+    public FileAccessRequest(DataFile df, AuthenticatedUser au, GuestbookResponse gbr){
+        this(df, au);
+        setGuestbookResponse(gbr);
+    }
+     
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+    
+    public DataFile getDataFile(){
+        return dataFile;
+    }
+    
+    public final void setDataFile(DataFile df){
+        this.dataFile = df;
+    }
+    
+    public AuthenticatedUser getRequester(){
+        return user;
+    }
+    
+    public final void setRequester(AuthenticatedUser au){
+        this.user = au;
+    }
+    
+    public GuestbookResponse getGuestbookResponse(){
+        return guestbookResponse;
+    }
+    
+    public final void setGuestbookResponse(GuestbookResponse gbr){
+        this.guestbookResponse = gbr;
+    }
+    
+    public RequestState getState() {
+        return this.requestState;
+    }
+    
+    public void setState(RequestState requestState) {
+        this.requestState = requestState;
+    }
+    
+    public String getStateLabel() {
+        if(isStateCreated()){
+            return "created";
+        }
+        if(isStateGranted()) {
+            return "granted";
+        }
+        if(isStateRejected()) {
+            return "rejected";
+        }
+        return null; 
+    }
+    
+    public void setStateCreated() {
+        this.requestState = RequestState.CREATED;
+    }
+    
+    public void setStateGranted() {
+        this.requestState = RequestState.GRANTED;
+    }
+
+    public void setStateRejected() {
+        this.requestState = RequestState.REJECTED;
+    }
+
+    public boolean isStateCreated() {
+        return this.requestState == RequestState.CREATED;
+    }
+   
+    public boolean isStateGranted() {
+        return this.requestState == RequestState.GRANTED;
+    }
+
+    public boolean isStateRejected() {
+        return this.requestState == RequestState.REJECTED;
+    }
+    
+    @Override
+    public int hashCode() {
+        int hash = 0;
+        hash += (id != null ? id.hashCode() : 0);
+        return hash;
+    }
+
+    public Date getCreationTime() {
+        return creationTime;
+    }
+
+    public void setCreationTime(Date creationTime) {
+        this.creationTime = creationTime;
+    }
+    
+    @Override
+    public boolean equals(Object object) {
+        // TODO: Warning - this method won't work in the case the id fields are not set
+        if (!(object instanceof FileAccessRequest)) {
+            return false;
+        }
+        FileAccessRequest other = (FileAccessRequest) object;
+        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
+            return false;
+        }
+        return true;
+    }
+    
+    
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java
new file mode 100644
index 00000000000..af8577fad34
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/FileAccessRequestServiceBean.java
@@ -0,0 +1,87 @@
+package edu.harvard.iq.dataverse;
+
+import java.util.List;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+
+/**
+ *
+ * @author Marina
+ */
+@Stateless
+@Named
+public class FileAccessRequestServiceBean {
+
+    @PersistenceContext(unitName = "VDCNet-ejbPU")
+    private EntityManager em;
+    
+    public FileAccessRequest find(Object pk) {
+        return em.find(FileAccessRequest.class, pk);
+    }
+
+    public List<FileAccessRequest> findAll() {
+        return em.createQuery("select object(o) from FileAccessRequest as o order by o.id", FileAccessRequest.class).getResultList();
+    }
+    
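+    /** Finds all requests matching the given user, data file, and request state. */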
+    public List<FileAccessRequest> findAll(Long authenticatedUserId, Long fileId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndDataFileIdAndRequestState", FileAccessRequest.class)
+                .setParameter("authenticatedUserId",authenticatedUserId)
+                .setParameter("dataFileId",fileId)
+                .setParameter("requestState",requestState)
+                .getResultList();
+    }
+    
+    public List<FileAccessRequest> findAllByAuthenticedUserId(Long authenticatedUserId){
+        return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserId", FileAccessRequest.class)
+                        .setParameter("authenticatedUserId", authenticatedUserId)
+                        .getResultList();
+    }
+    
+    public List<FileAccessRequest> findAllByGuestbookResponseId(Long guestbookResponseId){
+        return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseId", FileAccessRequest.class)
+                        .setParameter("guestbookResponseId", guestbookResponseId)
+                        .getResultList();
+    
+    }
+    
+    public List<FileAccessRequest> findAllByDataFileId(Long dataFileId){
+        return em.createNamedQuery("FileAccessRequest.findByDataFileId", FileAccessRequest.class)
+                        .setParameter("dataFileId", dataFileId)
+                        .getResultList();
+    }
+    
+    public List<FileAccessRequest> findAllByAuthenticatedUserIdAndRequestState(Long authenticatedUserId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByAuthenticatedUserIdAndRequestState", FileAccessRequest.class)
+                        .setParameter("authenticatedUserId", authenticatedUserId)
+                        .setParameter("requestState",requestState)
+                        .getResultList();
+    }
+    
+    public List<FileAccessRequest> findAllByGuestbookResponseIdAndRequestState(Long guestbookResponseId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByGuestbookResponseIdAndRequestState", FileAccessRequest.class)
+                        .setParameter("guestbookResponseId", guestbookResponseId)
+                        .setParameter("requestState",requestState)
+                        .getResultList();
+    }
+    
+    public List<FileAccessRequest> findAllByDataFileIdAndRequestState(Long dataFileId, FileAccessRequest.RequestState requestState){
+        return em.createNamedQuery("FileAccessRequest.findByDataFileIdAndRequestState", FileAccessRequest.class)
+                        .setParameter("dataFileId", dataFileId)
+                        .setParameter("requestState",requestState)
+                        .getResultList();
+    }
+    
+    
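+    /** Persists a new request (no id yet) or merges changes to an existing one. */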
+    public FileAccessRequest save(FileAccessRequest far) {
+        if (far.getId() == null) {
+            em.persist(far);
+            return far;
+        } else {
+            return em.merge(far);
+        }
+    }
+  
+    
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java b/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java
index e0c2b83ab65..84c033afcaf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDirectoryNameValidator.java
@@ -7,8 +7,8 @@
 
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java b/src/main/java/edu/harvard/iq/dataverse/FileDownload.java
deleted file mode 100644
index fad03d2a0a1..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownload.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package edu.harvard.iq.dataverse;
-
-import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
-import javax.persistence.CascadeType;
-import javax.persistence.OneToOne;
-import javax.persistence.MapsId;
-import javax.persistence.FetchType;
-import javax.persistence.JoinColumn;
-import java.util.Date;
-
-
-/**
- *
- * @author marina
- */
-@Entity
-public class FileDownload implements Serializable { 
-    
-    @Id
-    private Long id;
-    
-    @OneToOne(fetch = FetchType.LAZY)
-    @MapsId
-    private GuestbookResponse guestbookResponse;
-    
-    @Temporal(value = TemporalType.TIMESTAMP)
-    private Date downloadTimestamp;
-    
-    /*
-    Transient Values carry non-written information 
-    that will assist in the download process
-    - selected file ids is a comma delimited list that contains the file ids for multiple download
-    - fileFormat tells the download api which format a subsettable file should be downloaded as
-    */
-    
-    @Transient
-    private String selectedFileIds;
-    
-    @Transient 
-    private String fileFormat;
-    
-    
-    /**
-     * Possible values for downloadType include "Download", "Subset",
-     * or the displayName of an ExternalTool.
-     *
-     * TODO: Types like "Download" and "Subset" should
-     * be defined once as constants (likely an enum) rather than having these
-     * strings duplicated in various places when setDownloadtype() is called.
-     */
-    private String downloadtype;
-    private String sessionId;
-    
-     public FileDownload(){
-        
-    }
-     
-    public FileDownload(FileDownload source){
-        this.setDownloadTimestamp(source.getDownloadTimestamp());
-        this.setDownloadtype(source.getDownloadtype());
-        this.setFileFormat(source.getFileFormat());
-        this.setGuestbookResponse(source.getGuestbookResponse());
-        this.setSelectedFileIds(source.getSelectedFileIds());
-        this.setSessionId(source.getSessionId());
-    }
-    
-    public String getFileFormat() {
-        return fileFormat;
-    }
-
-    //for download
-    public void setFileFormat(String downloadFormat) {
-        this.fileFormat = downloadFormat;
-    }
-    
-    public String getDownloadtype() {
-        return downloadtype;
-    }
-
-    public void setDownloadtype(String downloadtype) {
-        this.downloadtype = downloadtype;
-    }
-    
-    public String getSessionId() {
-        return sessionId;
-    }
-
-    public void setSessionId(String sessionId) {
-        this.sessionId = sessionId;
-    }
-    
-    public String getSelectedFileIds() {
-        return selectedFileIds;
-    }
-
-    public void setSelectedFileIds(String selectedFileIds) {
-        this.selectedFileIds = selectedFileIds;
-    }
-    
-    public Long getId() {
-        return id;
-    }
-
-    public void setId(Long id) {
-        this.id = id;
-    }
-    
-    public Date getDownloadTimestamp(){
-        return this.downloadTimestamp;
-    }
-    
-    public void setDownloadTimestamp(Date downloadTimestamp){
-        this.downloadTimestamp = downloadTimestamp;
-    }
-    
-    
-    public void setGuestbookResponse(GuestbookResponse gbr){
-        this.guestbookResponse = gbr;
-    }
-
-    public GuestbookResponse getGuestbookResponse(){
-        return this.guestbookResponse;
-    }
-    
-    @Override
-    public int hashCode() {
-        int hash = 0;
-        hash += (id != null ? id.hashCode() : 0);
-        return hash;
-    }
-
-    @Override
-    public boolean equals(Object object) {
-        // TODO: Warning - this method won't work in the case the id fields are not set
-        if (!(object instanceof FileDownload)) {
-            return false;
-        }
-        FileDownload other = (FileDownload) object;
-        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
-            return false;
-        }
-        return true;
-    }
-
-    @Override
-    public String toString() {
-        return "edu.harvard.iq.dataverse.FileDownload[ id=" + id + " ]";
-    }
-    
-    
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
index ef7ed1a2010..4d8100124ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadHelper.java
@@ -9,6 +9,7 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
+import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
@@ -18,10 +19,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.primefaces.PrimeFaces;
 //import org.primefaces.context.RequestContext;
 
@@ -53,6 +54,9 @@ public class FileDownloadHelper implements java.io.Serializable {
     
     @EJB
     DataFileServiceBean datafileService;
+    
+    @EJB
+    GlobusServiceBean globusService;
 
     private final Map<Long, Boolean> fileDownloadPermissionMap = new HashMap<>(); // { FileMetadata.id : Boolean } 
 
@@ -60,40 +64,40 @@ public FileDownloadHelper() {
         this.filesForRequestAccess = new ArrayList<>();
     }
 
-    // See also @Size(max = 255) in GuestbookResponse
-     private boolean testResponseLength(String value) {
-        return !(value != null && value.length() > 255);
-     }
-
     // This helper method is called from the Download terms/guestbook/etc. popup,
     // when the user clicks the "ok" button. We use it, instead of calling
     // downloadServiceBean directly, in order to differentiate between single
-    // file downloads and multiple (batch) downloads - sice both use the same
+    // file downloads and multiple (batch) downloads - since both use the same
     // terms/etc. popup.
-    public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse) {
-        PrimeFaces.current().executeScript("PF('downloadPopup').hide()");
-        guestbookResponse.setDownloadtype("Download");
+    public void writeGuestbookAndStartDownload(GuestbookResponse guestbookResponse, boolean isGlobusTransfer) {
+        PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
          // Note that this method is only ever called from the file-download-popup -
          // meaning we know for the fact that we DO want to save this
          // guestbookResponse permanently in the database.
-        if (guestbookResponse.getSelectedFileIds() != null) {
-            // this is a batch (multiple file) download.
-            // Although here's a chance that this is not really a batch download - i.e.,
-            // there may only be one file on the file list. But the fileDownloadService
-            // method below will check for that, and will redirect to the single download, if
-            // that's the case. -- L.A.
-            fileDownloadService.writeGuestbookAndStartBatchDownload(guestbookResponse);
-        } else if (guestbookResponse.getDataFile() != null) {
-            // this a single file download:
-            fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse);
+        if(isGlobusTransfer) {
+            globusService.writeGuestbookAndStartTransfer(guestbookResponse, true);
+        } else {
+            if (guestbookResponse.getSelectedFileIds() != null) {
+                // this is a batch (multiple file) download.
+                // Although here's a chance that this is not really a batch download - i.e.,
+                // there may only be one file on the file list. But the fileDownloadService
+                // method below will check for that, and will redirect to the single download,
+                // if
+                // that's the case. -- L.A.
+                fileDownloadService.writeGuestbookAndStartBatchDownload(guestbookResponse);
+            } else if (guestbookResponse.getDataFile() != null) {
+                // this a single file download:
+                fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse);
+            }
         }
      }
 
      public void writeGuestbookAndOpenSubset(GuestbookResponse guestbookResponse) {
 
-             PrimeFaces.current().executeScript("PF('downloadPopup').hide()");
+             PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()");
              PrimeFaces.current().executeScript("PF('downloadDataSubsetPopup').show()");
-             guestbookResponse.setDownloadtype("Subset");
+             guestbookResponse.setEventType(GuestbookResponse.SUBSET);
              fileDownloadService.writeGuestbookResponseRecord(guestbookResponse);
 
      }
@@ -132,22 +136,33 @@ public void writeGuestbookAndLaunchExploreTool(GuestbookResponse guestbookRespon
 
          fileDownloadService.explore(guestbookResponse, fmd, externalTool);
          //requestContext.execute("PF('downloadPopup').hide()");
-         PrimeFaces.current().executeScript("PF('downloadPopup').hide()");
+         PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()");
     }
      
     public void writeGuestbookAndLaunchPackagePopup(GuestbookResponse guestbookResponse) {
 
-            PrimeFaces.current().executeScript("PF('downloadPopup').hide()");
+            PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()");
             PrimeFaces.current().executeScript("PF('downloadPackagePopup').show()");
             PrimeFaces.current().executeScript("handleResizeDialog('downloadPackagePopup')");
             fileDownloadService.writeGuestbookResponseRecord(guestbookResponse);
     }
 
+    
+    public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse) {
+
+        if(!filesForRequestAccess.isEmpty()) {
+            /* Only for single file requests (i.e. from kebab menu) */
+            guestbookResponse.setDataFile(filesForRequestAccess.get(0));
+        }
+        PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()");
+        fileDownloadService.writeGuestbookResponseAndRequestAccess(guestbookResponse);
+    }
+    
      /**
       * Writes a guestbook entry for either popup scenario: guestbook or terms.
       */
      public boolean writeGuestbookAndShowPreview(GuestbookResponse guestbookResponse) {
-         guestbookResponse.setDownloadtype("Explore");
+         guestbookResponse.setEventType(GuestbookResponse.EXPLORE);
          fileDownloadService.writeGuestbookResponseRecord(guestbookResponse);
          return true;
      }
@@ -284,7 +299,7 @@ public void handleCommandLinkClick(FileMetadata fmd){
         
         if (FileUtil.isRequestAccessPopupRequired(fmd.getDatasetVersion())){
             addFileForRequestAccess(fmd.getDataFile());
-            PrimeFaces.current().executeScript("PF('requestAccessPopup').show()");
+            PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');");
         } else {
             requestAccess(fmd.getDataFile());
         }
@@ -299,7 +314,7 @@ public void requestAccessMultiple(List<DataFile> files) {
          DataFile notificationFile = null;
          for (DataFile file : files) {
              //Not sending notification via request method so that
-             // we can bundle them up into one nofication at dataset level
+             // we can bundle them up into one notification at dataset level
              test = processRequestAccess(file, false);
              succeeded |= test;
              if (notificationFile == null) {
@@ -307,13 +322,15 @@ public void requestAccessMultiple(List<DataFile> files) {
              }
          }
          if (notificationFile != null && succeeded) {
-             fileDownloadService.sendRequestFileAccessNotification(notificationFile, (AuthenticatedUser) session.getUser());
+             fileDownloadService.sendRequestFileAccessNotification(notificationFile.getOwner(),
+                     notificationFile.getId(), (AuthenticatedUser) session.getUser());
+             JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success"));
          }
      }
     
      public void requestAccessIndirect() {
          //Called when there are multiple files and no popup
-         // or there's a popup with sigular or multiple files
+         // or there's a popup with singular or multiple files
          // The list of files for Request Access is set in the Dataset Page when
          // user clicks the request access button in the files fragment
          // (and has selected one or more files)
@@ -324,15 +341,16 @@ public void requestAccessIndirect() {
      private boolean processRequestAccess(DataFile file, Boolean sendNotification) {
          if (fileDownloadService.requestAccess(file.getId())) {
              // update the local file object so that the page properly updates
-             if(file.getFileAccessRequesters() == null){
-                 file.setFileAccessRequesters(new ArrayList());
-             }
-             file.getFileAccessRequesters().add((AuthenticatedUser) session.getUser());
+             AuthenticatedUser user = (AuthenticatedUser) session.getUser();
+             // This seems to be required because the command invoked via fileDownloadService.requestAccess() above does not return the updated file object
+             FileAccessRequest request = new FileAccessRequest(file, user);
+             file.addFileAccessRequest(request);
+
              // create notification if necessary
              if (sendNotification) {
-                 fileDownloadService.sendRequestFileAccessNotification(file, (AuthenticatedUser) session.getUser());
-             }           
-             JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success"));
+                 fileDownloadService.sendRequestFileAccessNotification(file.getOwner(), file.getId(), (AuthenticatedUser) session.getUser());
+                 JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success"));
+             }
              return true;
          }
          JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("file.accessRequested.alreadyRequested", Arrays.asList(file.getDisplayName())));
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
index aa37c579224..549e6385e40 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java
@@ -4,10 +4,10 @@
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.CreateGuestbookResponseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RequestAccessCommand;
@@ -15,12 +15,13 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
+import edu.harvard.iq.dataverse.util.JsfHelper;
 import edu.harvard.iq.dataverse.util.StringUtil;
+import edu.harvard.iq.dataverse.util.URLTokenUtil;
+
 import java.io.IOException;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -29,16 +30,16 @@
 import java.util.List;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.faces.context.FacesContext;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 import org.primefaces.PrimeFaces;
 //import org.primefaces.context.RequestContext;
@@ -73,9 +74,9 @@ public class FileDownloadServiceBean implements java.io.Serializable {
     @EJB
     AuthenticationServiceBean authService;
     @EJB
-    PrivateUrlServiceBean privateUrlService;
-    @EJB
     SettingsServiceBean settingsService;
+    @EJB
+    MailServiceBean mailService;
 
     @Inject
     DataverseSession session;
@@ -192,6 +193,42 @@ public void writeGuestbookAndStartFileDownload(GuestbookResponse guestbookRespon
         redirectToDownloadAPI(guestbookResponse.getFileFormat(), guestbookResponse.getDataFile().getId());
         logger.fine("issued file download redirect for datafile "+guestbookResponse.getDataFile().getId());
     }
+    
+    public void writeGuestbookResponseAndRequestAccess(GuestbookResponse guestbookResponse){
+        if (guestbookResponse == null || ( guestbookResponse.getDataFile() == null && guestbookResponse.getSelectedFileIds() == null) ) {
+            return;
+        }
+
+        guestbookResponse.setEventType(GuestbookResponse.ACCESS_REQUEST);
+
+        List<DataFile> selectedDataFiles = new ArrayList<>(); // always make sure it's at least an empty List
+
+        if(guestbookResponse.getDataFile() != null ){ //one file 'selected' by 'Request Access' button click
+            selectedDataFiles.add(datafileService.find(guestbookResponse.getDataFile().getId())); //don't want the findCheapAndEasy
+        }
+
+        if(guestbookResponse.getSelectedFileIds() != null && !guestbookResponse.getSelectedFileIds().isEmpty()) { //multiple selected through multi-select Request Access button
+            selectedDataFiles = datafileService.findAll(guestbookResponse.getSelectedFileIds());
+        }
+
+        int countRequestAccessSuccess = 0;
+
+        for(DataFile dataFile : selectedDataFiles){
+            guestbookResponse.setDataFile(dataFile);
+            writeGuestbookResponseRecordForRequestAccess(guestbookResponse);
+            if(requestAccess(dataFile,guestbookResponse)){
+                countRequestAccessSuccess++;
+            } else {
+                JsfHelper.addWarningMessage(BundleUtil.getStringFromBundle("file.accessRequested.alreadyRequested", Arrays.asList(dataFile.getDisplayName())));
+            }
+        }
+
+        if(countRequestAccessSuccess > 0){
+            DataFile firstDataFile = selectedDataFiles.get(0);
+            sendRequestFileAccessNotification(firstDataFile.getOwner(), firstDataFile.getId(), (AuthenticatedUser) session.getUser());
+            JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("file.accessRequested.success"));
+        }
+    }
 
     public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse, FileMetadata fileMetadata, String format) {
         if(!fileMetadata.getDatasetVersion().isDraft()){           
@@ -221,6 +258,18 @@ public void writeGuestbookResponseRecord(GuestbookResponse guestbookResponse) {
         }
     }
     
+    public void writeGuestbookResponseRecordForRequestAccess(GuestbookResponse guestbookResponse) {
+        try {
+            CreateGuestbookResponseCommand cmd = new CreateGuestbookResponseCommand(dvRequestService.getDataverseRequest(), guestbookResponse, guestbookResponse.getDataset());
+            commandEngine.submit(cmd);
+
+        } catch (CommandException e) {
+            // if an error occurs here the guestbook response record simply isn't written; log it and move on
+            logger.info("Failed to writeGuestbookResponseRecord for RequestAccess");
+        }
+
+    }
+    
     // The "guestBookRecord(s)AlreadyWritten" parameter in the 2 methods 
     // below (redirectToBatchDownloadAPI() and redirectToDownloadAPI(), for the 
     // multiple- and single-file downloads respectively) are passed to the 
@@ -263,13 +312,19 @@ private void redirectToCustomZipDownloadService(String customZipServiceUrl, Stri
         }
     }
 
-    private void redirectToDownloadAPI(String downloadType, Long fileId, boolean guestBookRecordAlreadyWritten, Long fileMetadataId) {
-        String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten, fileMetadataId);
-        logger.fine("Redirecting to file download url: " + fileDownloadUrl);
-        try {
-            FacesContext.getCurrentInstance().getExternalContext().redirect(fileDownloadUrl);
-        } catch (IOException ex) {
-            logger.info("Failed to issue a redirect to file download url (" + fileDownloadUrl + "): " + ex);
+    private void redirectToDownloadAPI(String downloadType, Long fileId, boolean guestBookRecordAlreadyWritten,
+            Long fileMetadataId) {
+        String fileDownloadUrl = FileUtil.getFileDownloadUrlPath(downloadType, fileId, guestBookRecordAlreadyWritten,
+                fileMetadataId);
+        if (downloadType.equals("GlobusTransfer")) {
+            PrimeFaces.current().executeScript(URLTokenUtil.getScriptForUrl(fileDownloadUrl));
+        } else {
+            logger.fine("Redirecting to file download url: " + fileDownloadUrl);
+            try {
+                FacesContext.getCurrentInstance().getExternalContext().redirect(fileDownloadUrl);
+            } catch (IOException ex) {
+                logger.info("Failed to issue a redirect to file download url (" + fileDownloadUrl + "): " + ex);
+            }
         }
     }
     
@@ -299,8 +354,8 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter
         ApiToken apiToken = null;
         User user = session.getUser();
         DatasetVersion version = fmd.getDatasetVersion();
-        if (version.isDraft() || (fmd.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fmd))) {
-            apiToken = getApiToken(user);
+        if (version.isDraft() || fmd.getDatasetVersion().isDeaccessioned() || (fmd.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fmd))) {
+            apiToken = authService.getValidApiTokenForUser(user);
         }
         DataFile dataFile = null;
         if (fmd != null) {
@@ -313,7 +368,7 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter
         String localeCode = session.getLocaleCode();
         ExternalToolHandler externalToolHandler = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd, localeCode);
         // Persist the name of the tool (i.e. "Data Explorer", etc.)
-        guestbookResponse.setDownloadtype(externalTool.getDisplayName());
+        guestbookResponse.setEventType(externalTool.getDisplayName());
         PrimeFaces.current().executeScript(externalToolHandler.getExploreScript());
         // This is the old logic from TwoRavens, null checks and all.
         if (guestbookResponse != null && guestbookResponse.isWriteResponse()
@@ -327,24 +382,6 @@ public void explore(GuestbookResponse guestbookResponse, FileMetadata fmd, Exter
         }
     }
 
-    public ApiToken getApiToken(User user) {
-        ApiToken apiToken = null;
-        if (user instanceof AuthenticatedUser) {
-            AuthenticatedUser authenticatedUser = (AuthenticatedUser) user;
-            apiToken = authService.findApiTokenByUser(authenticatedUser);
-            if (apiToken == null || apiToken.isExpired()) {
-                //No un-expired token
-                apiToken = authService.generateApiTokenForUser(authenticatedUser);
-            }
-        } else if (user instanceof PrivateUrlUser) {
-            PrivateUrlUser privateUrlUser = (PrivateUrlUser) user;
-            PrivateUrl privateUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId());
-            apiToken = new ApiToken();
-            apiToken.setTokenString(privateUrl.getToken());
-        }
-        return apiToken;
-    }
-
     public void downloadDatasetCitationXML(Dataset dataset) {
         downloadCitationXML(null, dataset, false);
     }
@@ -489,7 +526,7 @@ public boolean requestAccess(Long fileId) {
             return false;
         }
         DataFile file = datafileService.find(fileId);
-        if (!file.getFileAccessRequesters().contains((AuthenticatedUser)session.getUser())) {
+        if (!file.containsActiveFileAccessRequestFromUser(session.getUser())) {
             try {
                 commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), file));                        
                 return true;
@@ -499,12 +536,33 @@ public boolean requestAccess(Long fileId) {
             }             
         }        
         return false;
-    }    
+    }
     
-    public void sendRequestFileAccessNotification(DataFile datafile, AuthenticatedUser requestor) {
-        permissionService.getUsersWithPermissionOn(Permission.ManageFilePermissions, datafile).stream().forEach((au) -> {
-            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REQUESTFILEACCESS, datafile.getId(), null, requestor, false);
+    public boolean requestAccess(DataFile dataFile, GuestbookResponse gbr){
+        boolean accessRequested = false;
+        if (dvRequestService.getDataverseRequest().getAuthenticatedUser() == null){
+            return accessRequested;
+        }
+
+        if(!dataFile.containsActiveFileAccessRequestFromUser(session.getUser())) {
+            try {
+                commandEngine.submit(new RequestAccessCommand(dvRequestService.getDataverseRequest(), dataFile, gbr));
+                accessRequested = true;
+            } catch (CommandException ex) {
+                logger.info("Unable to request access for file id " + dataFile.getId() + ". Exception: " + ex);
+            }
+        } 
+
+        return accessRequested;
+    }
+    
+    public void sendRequestFileAccessNotification(Dataset dataset, Long fileId, AuthenticatedUser requestor) {
+        Timestamp ts = new Timestamp(new Date().getTime());
+        permissionService.getUsersWithPermissionOn(Permission.ManageDatasetPermissions, dataset).stream().forEach((au) -> {
+            userNotificationService.sendNotification(au, ts, UserNotification.Type.REQUESTFILEACCESS, fileId, null, requestor, true);
         });
+        // also notify the requesting user that their file access request was submitted
+        userNotificationService.sendNotification(requestor, ts, UserNotification.Type.REQUESTEDFILEACCESS, fileId, null, requestor, true);
 
     } 
     
@@ -572,5 +630,4 @@ public String getDirectStorageLocatrion(String storageLocation) {
             
         return null; 
     }
-    
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
index fc31d0867ed..461c8b14e46 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileMetadata.java
@@ -13,30 +13,33 @@
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.CascadeType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.persistence.Version;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.persistence.Version;
 
 import edu.harvard.iq.dataverse.datavariable.CategoryMetadata;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
@@ -46,12 +49,12 @@
 import edu.harvard.iq.dataverse.util.StringUtil;
 import java.util.HashSet;
 import java.util.Set;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 import org.hibernate.validator.constraints.NotBlank;
-import javax.validation.constraints.Pattern;
+import jakarta.validation.constraints.Pattern;
 
 
 /**
@@ -203,6 +206,25 @@ public void setVarGroups(List<VarGroup> varGroups) {
     private List<DataFileCategory> fileCategories;
     
     public List<DataFileCategory> getCategories() {
+        if (fileCategories != null) {
+            /*
+             * fileCategories can sometimes be an
+             * org.eclipse.persistence.indirection.IndirectList. When that happens, the
+             * comparator in the Collections.sort below is not called, possibly due to
+             * https://bugs.eclipse.org/bugs/show_bug.cgi?id=446236, which is Java 1.8+
+             * specific. Converting to an ArrayList solves the problem, but the longer-term
+             * solution may be in avoiding the IndirectList or moving to a new version of
+             * the jar it is in.
+             */
+            if (!(fileCategories instanceof ArrayList)) {
+                List<DataFileCategory> newDFCs = new ArrayList<DataFileCategory>();
+                for (DataFileCategory fdc : fileCategories) {
+                    newDFCs.add(fdc);
+                }
+                setCategories(newDFCs);
+            }
+            Collections.sort(fileCategories, FileMetadata.compareByNameWithSortCategories);
+        }
         return fileCategories;
     }
     
@@ -228,7 +250,7 @@ public List<String> getCategoriesByName() {
             return ret;
         }
         
-        for (DataFileCategory fileCategory : fileCategories) {
+        for (DataFileCategory fileCategory : getCategories()) {
             ret.add(fileCategory.getName());
         }
         // fileCategories.stream()
@@ -536,7 +558,7 @@ public boolean compareContent(FileMetadata other){
     
     @Override
     public String toString() {
-        return "edu.harvard.iq.dvn.core.study.FileMetadata[id=" + id + "]";
+        return "edu.harvard.iq.dataverse.FileMetadata[id=" + id + "]";
     }
     
     public static final Comparator<FileMetadata> compareByLabel = new Comparator<FileMetadata>() {
@@ -546,28 +568,37 @@ public int compare(FileMetadata o1, FileMetadata o2) {
         }
     };
     
-    public static final Comparator<FileMetadata> compareByLabelAndFolder = new Comparator<FileMetadata>() {
+    static Map<String,Long> categoryMap=null;
+    
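+    /** Parses a comma-separated list of category names into a 1-based rank map, keyed by upper-cased name. */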
+    public static void setCategorySortOrder(String categories) {
+       categoryMap=new HashMap<String, Long>();
+       long i=1;
+       for(String cat: categories.split(",\\s*")) {
+           categoryMap.put(cat.toUpperCase(), i);
+           i++;
+       }
+    }
+    
+    public static Map<String,Long> getCategorySortOrder() {
+        return categoryMap;
+    }
+    
+    
+    public static final Comparator<DataFileCategory> compareByNameWithSortCategories = new Comparator<DataFileCategory>() {
         @Override
-        public int compare(FileMetadata o1, FileMetadata o2) {
-            String folder1 = o1.getDirectoryLabel() == null ? "" : o1.getDirectoryLabel().toUpperCase();
-            String folder2 = o2.getDirectoryLabel() == null ? "" : o2.getDirectoryLabel().toUpperCase();
-            
-            
-            // We want to the files w/ no folders appear *after* all the folders
-            // on the sorted list:
-            if ("".equals(folder1) && !"".equals(folder2)) {
-                return 1;
-            }
-            
-            if ("".equals(folder2) && !"".equals(folder1)) {
-                return -1;
-            }
-            
-            int comp = folder1.compareTo(folder2); 
-            if (comp != 0) {
-                return comp;
+        public int compare(DataFileCategory o1, DataFileCategory o2) {
+            if (categoryMap != null) {
+                //If one is in the map and one is not, the former is first, otherwise sort by name
+                boolean o1InMap = categoryMap.containsKey(o1.getName().toUpperCase()); 
+                boolean o2InMap = categoryMap.containsKey(o2.getName().toUpperCase());
+                if(o1InMap && !o2InMap) {
+                    return (-1);
+                }
+                if(!o1InMap && o2InMap) {
+                    return 1;
+                }
             }
-            return o1.getLabel().toUpperCase().compareTo(o2.getLabel().toUpperCase());
+            return(o1.getName().toUpperCase().compareTo(o2.getName().toUpperCase()));
         }
     };
     
diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index 228db0a7584..479c8a429c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -22,16 +22,14 @@
 import edu.harvard.iq.dataverse.engine.command.impl.PersistProvFreeFormCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
-import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -39,9 +37,8 @@
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.io.IOException;
-import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -49,19 +46,19 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.validation.ConstraintViolation;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.validation.ConstraintViolation;
 
 import org.primefaces.PrimeFaces;
 import org.primefaces.component.tabview.TabView;
@@ -80,6 +77,7 @@ public class FilePage implements java.io.Serializable {
     private FileMetadata fileMetadata;
     private Long fileId;  
     private String version;
+    private String toolType;
     private DataFile file;   
     private GuestbookResponse guestbookResponse;
     private int selectedTabIndex;
@@ -91,6 +89,7 @@ public class FilePage implements java.io.Serializable {
     private List<ExternalTool> configureTools;
     private List<ExternalTool> exploreTools;
     private List<ExternalTool> toolsWithPreviews;
+    private List<ExternalTool> queryTools;
     private Long datasetVersionId;
     /**
      * Have the terms been met so that the Preview tab can show the preview?
@@ -152,7 +151,6 @@ public String init() {
      
         
         if (fileId != null || persistentId != null) {
-
             // ---------------------------------------
             // Set the file and datasetVersion 
             // ---------------------------------------           
@@ -242,13 +240,28 @@ public String init() {
             }
             configureTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.CONFIGURE, contentType);
             exploreTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.EXPLORE, contentType);
+            queryTools = externalToolService.findFileToolsByTypeAndContentType(ExternalTool.Type.QUERY, contentType);
             Collections.sort(exploreTools, CompareExternalToolName);
             toolsWithPreviews  = sortExternalTools();
-            if(!toolsWithPreviews.isEmpty()){
-                setSelectedTool(toolsWithPreviews.get(0));                
+
+            if (toolType != null) {
+                if (toolType.equals("PREVIEW")) {
+                    if (!toolsWithPreviews.isEmpty()) {
+                        setSelectedTool(toolsWithPreviews.get(0));
+                    }
+                }
+                if (toolType.equals("QUERY")) {
+                    if (!queryTools.isEmpty()) {
+                        setSelectedTool(queryTools.get(0));
+                    }
+                }
+            } else {
+                if (!getAllAvailableTools().isEmpty()){
+                    setSelectedTool(getAllAvailableTools().get(0));
+                }
             }
-        } else {
 
+        } else {
             return permissionsWrapper.notFound();
         }
         
@@ -266,10 +279,19 @@ public String init() {
     private void displayPublishMessage(){
         if (fileMetadata.getDatasetVersion().isDraft()  && canUpdateDataset()
                 &&   (canPublishDataset() || !fileMetadata.getDatasetVersion().getDataset().isLockedFor(DatasetLock.Reason.InReview))){
-            JsfHelper.addWarningMessage(datasetService.getReminderString(fileMetadata.getDatasetVersion().getDataset(), canPublishDataset(), true));
+            JsfHelper.addWarningMessage(datasetService.getReminderString(fileMetadata.getDatasetVersion().getDataset(), canPublishDataset(), true, isValid()));
         }               
     }
     
+    public boolean isValid() {
+        if (!fileMetadata.getDatasetVersion().isDraft()) {
+            return true;
+        }
+        DatasetVersion newVersion = fileMetadata.getDatasetVersion().cloneDatasetVersion();
+        newVersion.setDatasetFields(newVersion.initDatasetFields());
+        return newVersion.isValid();
+    }
+
     private boolean canViewUnpublishedDataset() {
         return permissionsWrapper.canViewUnpublishedDataset( dvRequestService.getDataverseRequest(), fileMetadata.getDatasetVersion().getDataset());
     }
@@ -303,6 +325,20 @@ private List<ExternalTool> sortExternalTools(){
         Collections.sort(retList, CompareExternalToolName);
         return retList;
     }
+    
+    private String termsGuestbookPopupAction = "";
+
+    public void setTermsGuestbookPopupAction(String popupAction){
+        if(popupAction != null && popupAction.length() > 0){
+            logger.info("TGPA set to " + popupAction);
+            this.termsGuestbookPopupAction = popupAction;
+        }
+
+    }
+
+    public String getTermsGuestbookPopupAction(){
+        return termsGuestbookPopupAction;
+    }
 
     public boolean isDownloadPopupRequired() {  
         if(fileMetadata.getId() == null || fileMetadata.getDatasetVersion().getId() == null ){
@@ -318,6 +354,18 @@ public boolean isRequestAccessPopupRequired() {
         return FileUtil.isRequestAccessPopupRequired(fileMetadata.getDatasetVersion());
     }
 
+    public boolean isGuestbookAndTermsPopupRequired() {  
+        if(fileMetadata.getId() == null || fileMetadata.getDatasetVersion().getId() == null ){
+            return false;
+        }
+        return FileUtil.isGuestbookAndTermsPopupRequired(fileMetadata.getDatasetVersion());
+    }
+    
+    public boolean isGuestbookPopupRequiredAtDownload(){
+        // Only show guestbookAtDownload if guestbook at request is disabled (legacy behavior)
+        DatasetVersion workingVersion = fileMetadata.getDatasetVersion();
+        return FileUtil.isGuestbookPopupRequired(workingVersion) && !workingVersion.getDataset().getEffectiveGuestbookEntryAtRequest();
+    }
 
     public void setFileMetadata(FileMetadata fileMetadata) {
         this.fileMetadata = fileMetadata;
@@ -364,9 +412,9 @@ public List< String[]> getExporters(){
                 // Not all metadata exports should be presented to the web users!
                 // Some are only for harvesting clients.
                 
-                String[] temp = new String[2];            
+                String[] temp = new String[2];
                 temp[0] = formatDisplayName;
-                temp[1] = myHostURL + "/api/datasets/export?exporter=" + formatName + "&persistentId=" + fileMetadata.getDatasetVersion().getDataset().getGlobalIdString();
+                temp[1] = myHostURL + "/api/datasets/export?exporter=" + formatName + "&persistentId=" + fileMetadata.getDatasetVersion().getDataset().getGlobalId().asString();
                 retList.add(temp);
             }
         }
@@ -726,7 +774,7 @@ public boolean isThumbnailAvailable(FileMetadata fileMetadata) {
     
     private String returnToDatasetOnly(){
         
-         return "/dataset.xhtml?persistentId=" + editDataset.getGlobalIdString()  + "&version=DRAFT" + "&faces-redirect=true";   
+         return "/dataset.xhtml?persistentId=" + editDataset.getGlobalId().asString()  + "&version=DRAFT" + "&faces-redirect=true";   
     }
     
     private String returnToDraftVersion(){ 
@@ -858,9 +906,9 @@ public String getComputeUrl() throws IOException {
             swiftObject.open();
             //generate a temp url for a file
             if (isHasPublicStore()) {
-                return settingsService.getValueForKey(SettingsServiceBean.Key.ComputeBaseUrl) + "?" + this.getFile().getOwner().getGlobalIdString() + "=" + swiftObject.getSwiftFileName();
+                return settingsService.getValueForKey(SettingsServiceBean.Key.ComputeBaseUrl) + "?" + this.getFile().getOwner().getGlobalId().asString() + "=" + swiftObject.getSwiftFileName();
             }
-            return settingsService.getValueForKey(SettingsServiceBean.Key.ComputeBaseUrl) + "?" + this.getFile().getOwner().getGlobalIdString() + "=" + swiftObject.getSwiftFileName() + "&temp_url_sig=" + swiftObject.getTempUrlSignature() + "&temp_url_expires=" + swiftObject.getTempUrlExpiry();
+            return settingsService.getValueForKey(SettingsServiceBean.Key.ComputeBaseUrl) + "?" + this.getFile().getOwner().getGlobalId().asString() + "=" + swiftObject.getSwiftFileName() + "&temp_url_sig=" + swiftObject.getTempUrlSignature() + "&temp_url_expires=" + swiftObject.getTempUrlExpiry();
         }
         return "";
     }
@@ -983,6 +1031,30 @@ public List<ExternalTool> getToolsWithPreviews() {
         return toolsWithPreviews;
     }
     
+    public List<ExternalTool> getQueryTools() {
+        return queryTools;
+    }
+    
+    
+    public List<ExternalTool> getAllAvailableTools(){
+        List<ExternalTool> externalTools = new ArrayList<>();
+        externalTools.addAll(queryTools);
+        for (ExternalTool pt : toolsWithPreviews){
+            if (!externalTools.contains(pt)){
+                externalTools.add(pt);
+            }
+        }        
+        return externalTools;
+    }
+    
+    public String getToolType() {
+        return toolType;
+    }
+
+    public void setToolType(String toolType) {
+        this.toolType = toolType;
+    }
+    
     private ExternalTool selectedTool;
 
     public ExternalTool getSelectedTool() {
@@ -996,8 +1068,8 @@ public void setSelectedTool(ExternalTool selectedTool) {
     public String preview(ExternalTool externalTool) {
         ApiToken apiToken = null;
         User user = session.getUser();
-        if (fileMetadata.getDatasetVersion().isDraft() || (fileMetadata.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fileMetadata))) {
-            apiToken=fileDownloadService.getApiToken(user);
+        if (fileMetadata.getDatasetVersion().isDraft() || fileMetadata.getDatasetVersion().isDeaccessioned() || (fileMetadata.getDataFile().isRestricted()) || (FileUtil.isActivelyEmbargoed(fileMetadata))) {
+            apiToken=authService.getValidApiTokenForUser(user);
         }
         if(externalTool == null){
             return "";
@@ -1175,7 +1247,22 @@ public String getEmbargoPhrase() {
             return BundleUtil.getStringFromBundle("embargoed.willbeuntil");
         }
     }
-
+    
+    public String getToolTabTitle(){
+        if (getAllAvailableTools().size() > 1) {
+            return BundleUtil.getStringFromBundle("file.toolTab.header");
+        }
+        if( getSelectedTool() != null ){
+           if(getSelectedTool().isPreviewTool()){
+               return BundleUtil.getStringFromBundle("file.previewTab.header");
+           } 
+           if(getSelectedTool().isQueryTool()){
+               return BundleUtil.getStringFromBundle("file.queryTab.header");
+           }          
+        } 
+        return BundleUtil.getStringFromBundle("file.toolTab.header");
+    }
+    
     public String getIngestMessage() {
         return BundleUtil.getStringFromBundle("file.ingestFailed.message", Arrays.asList(settingsWrapper.getGuidesBaseUrl(), settingsWrapper.getGuidesVersion()));
     }
@@ -1184,5 +1271,23 @@ public String getIngestMessage() {
     public boolean isHasPublicStore() {
         return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.PublicInstall, StorageIO.isPublicStore(DataAccess.getStorageDriverFromIdentifier(file.getStorageIdentifier())));
     }
+    
+    //Allows use of fileDownloadHelper in file.xhtml
+    public FileDownloadHelper getFileDownloadHelper() {
+        return fileDownloadHelper;
+    }
+
+    public void setFileDownloadHelper(FileDownloadHelper fileDownloadHelper) {
+        this.fileDownloadHelper = fileDownloadHelper;
+    }
+
+    /**
+     * This method only exists because in file-edit-button-fragment.xhtml we
+     * call bean.editFileMetadata() and we need both FilePage (this bean) and
+     * DatasetPage to have the method defined to prevent errors in server.log.
+     */
+    public String editFileMetadata(){
+        return "";
+    }
 
 }
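For reference, the tool pre-selection added to FilePage.init() above boils down to: an explicit toolType request picks only from the matching list, and with no toolType the first entry of the combined query-then-preview list wins. A small standalone sketch of that precedence, with plain strings standing in for ExternalTool (names below are illustrative, not part of the patch):

import java.util.ArrayList;
import java.util.List;

public class ToolSelectionSketch {

    // Returns the tool that FilePage.init() would pre-select, per the patch logic.
    static String selectTool(String toolType, List<String> previewTools, List<String> queryTools) {
        if (toolType != null) {
            if (toolType.equals("PREVIEW") && !previewTools.isEmpty()) {
                return previewTools.get(0);
            }
            if (toolType.equals("QUERY") && !queryTools.isEmpty()) {
                return queryTools.get(0);
            }
            return null; // explicit request for a type with no tools selects nothing
        }
        // getAllAvailableTools(): query tools first, then previews not already present
        List<String> all = new ArrayList<>(queryTools);
        for (String previewTool : previewTools) {
            if (!all.contains(previewTool)) {
                all.add(previewTool);
            }
        }
        return all.isEmpty() ? null : all.get(0);
    }

    public static void main(String[] args) {
        List<String> previews = List.of("Data Previewer");
        List<String> queries = List.of("Data Query Tool");
        System.out.println(selectTool("PREVIEW", previews, queries)); // Data Previewer
        System.out.println(selectTool(null, previews, queries));      // Data Query Tool
    }
}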
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java
new file mode 100644
index 00000000000..62f10c18bdf
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java
@@ -0,0 +1,45 @@
+package edu.harvard.iq.dataverse;
+
+public class FileSearchCriteria {
+
+    private final String contentType;
+    private final FileAccessStatus accessStatus;
+    private final String categoryName;
+    private final String tabularTagName;
+    private final String searchText;
+
+    /**
+     * Status of the particular DataFile based on active embargoes and restriction state
+     */
+    public enum FileAccessStatus {
+        Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic
+    }
+
+    public FileSearchCriteria(String contentType, FileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText) {
+        this.contentType = contentType;
+        this.accessStatus = accessStatus;
+        this.categoryName = categoryName;
+        this.tabularTagName = tabularTagName;
+        this.searchText = searchText;
+    }
+
+    public String getContentType() {
+        return contentType;
+    }
+
+    public FileAccessStatus getAccessStatus() {
+        return accessStatus;
+    }
+
+    public String getCategoryName() {
+        return categoryName;
+    }
+
+    public String getTabularTagName() {
+        return tabularTagName;
+    }
+
+    public String getSearchText() {
+        return searchText;
+    }
+}
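A hypothetical caller-side view of the new FileSearchCriteria value class; the filter values below are made up for illustration, and treating null as "no filter" is an assumption about how consumers of the criteria behave:

import edu.harvard.iq.dataverse.FileSearchCriteria;
import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus;

public class FileSearchCriteriaExample {
    public static void main(String[] args) {
        // All filter values below are made-up examples.
        FileSearchCriteria criteria = new FileSearchCriteria(
                "text/tab-separated-values",  // contentType
                FileAccessStatus.Public,      // accessStatus
                "Data",                       // categoryName
                null,                         // tabularTagName (assumed to mean "no filter")
                "survey");                    // searchText

        System.out.println(criteria.getContentType());
        System.out.println(criteria.getAccessStatus());
    }
}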
diff --git a/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java b/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java
index 5e73ef65f25..ce3b0d65875 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FileUploadRenderer.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java
index 40d219d2638..db83ab953a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFieldMapping.java
@@ -3,13 +3,8 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.*;
+import jakarta.persistence.*;
 import java.util.Collection;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java
index 0fac75257c8..eb7b97b1a84 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ForeignMetadataFormatMapping.java
@@ -7,18 +7,18 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
index 20b280771fc..890b146a61c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GlobalId.java
@@ -6,7 +6,7 @@
 
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import static edu.harvard.iq.dataverse.util.StringUtil.isEmpty;
 import java.net.MalformedURLException;
@@ -16,7 +16,6 @@
 import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
 
 /**
  *
@@ -24,55 +23,28 @@
  */
 public class GlobalId implements java.io.Serializable {
     
-    public static final String DOI_PROTOCOL = "doi";
-    public static final String HDL_PROTOCOL = "hdl";
-    public static final String DOI_RESOLVER_URL = "https://doi.org/";
-    public static final String DXDOI_RESOLVER_URL = "https://dx.doi.org/";
-    public static final String HDL_RESOLVER_URL = "https://hdl.handle.net/";
-    public static final String HTTP_DOI_RESOLVER_URL = "http://doi.org/";
-    public static final String HTTP_DXDOI_RESOLVER_URL = "http://dx.doi.org/";
-    public static final String HTTP_HDL_RESOLVER_URL = "http://hdl.handle.net/";
-
-    public static Optional<GlobalId> parse(String identifierString) {
-        try {
-            return Optional.of(new GlobalId(identifierString));
-        } catch ( IllegalArgumentException _iae) {
-            return Optional.empty();
-        }
-    }
-    
     private static final Logger logger = Logger.getLogger(GlobalId.class.getName());
-    
-    @EJB
-    SettingsServiceBean settingsService;
 
-    /**
-     * 
-     * @param identifier The string to be parsed
-     * @throws IllegalArgumentException if the passed string cannot be parsed.
-     */
-    public GlobalId(String identifier) {
-        // set the protocol, authority, and identifier via parsePersistentId        
-        if ( ! parsePersistentId(identifier) ){
-            throw new IllegalArgumentException("Failed to parse identifier: " + identifier);
-        }
-    }
-
-    public GlobalId(String protocol, String authority, String identifier) {
+    public GlobalId(String protocol, String authority, String identifier, String separator, String urlPrefix, String providerName) {
         this.protocol = protocol;
         this.authority = authority;
         this.identifier = identifier;
+        if(separator!=null) {
+          this.separator = separator;
+        }
+        this.urlPrefix = urlPrefix;
+        this.managingProviderName = providerName;
     }
     
-    public GlobalId(DvObject dvObject) {
-        this.authority = dvObject.getAuthority();
-        this.protocol = dvObject.getProtocol();
-        this.identifier = dvObject.getIdentifier(); 
-    }
-        
+    // protocol: the identifier system, e.g. "doi"
+    // authority: the namespace that the registration authority manages within the identifier system
+    // identifier: the local identifier part
     private String protocol;
     private String authority;
     private String identifier;
+    private String managingProviderName;
+    private String separator = "/";
+    private String urlPrefix;
 
     /**
      * Tests whether {@code this} instance has all the data required for a 
@@ -87,161 +59,50 @@ public String getProtocol() {
         return protocol;
     }
 
-    public void setProtocol(String protocol) {
-        this.protocol = protocol;
-    }
-
     public String getAuthority() {
         return authority;
     }
 
-    public void setAuthority(String authority) {
-        this.authority = authority;
-    }
-
     public String getIdentifier() {
         return identifier;
     }
-
-    public void setIdentifier(String identifier) {
-        this.identifier = identifier;
-    }
     
+    public String getProvider() {
+        return managingProviderName;
+    }
+
     public String toString() {
         return asString();
     }
     
     /**
-     * Returns {@code this}' string representation. Differs from {@link #toString}
-     * which can also contain debug data, if needed.
+     * Concatenate the parts that make up a Global Identifier.
      * 
-     * @return The string representation of this global id.
+     * @return the Global Identifier, e.g. "doi:10.12345/67890"
      */
     public String asString() {
         if (protocol == null || authority == null || identifier == null) {
             return "";
         }
-        return protocol + ":" + authority + "/" + identifier;
+        return protocol + ":" + authority + separator + identifier;
     }
     
-    public URL toURL() {
+    public String asURL() {
         URL url = null;
         if (identifier == null){
             return null;
         }
         try {
-            if (protocol.equals(DOI_PROTOCOL)){
-               url = new URL(DOI_RESOLVER_URL + authority + "/" + identifier); 
-            } else if (protocol.equals(HDL_PROTOCOL)){
-               url = new URL(HDL_RESOLVER_URL + authority + "/" + identifier);  
-            }           
+               url = new URL(urlPrefix + authority + separator + identifier);
+               return url.toExternalForm();
         } catch (MalformedURLException ex) {
             logger.log(Level.SEVERE, null, ex);
-        }       
-        return url;
-    }    
-
-    
-    /** 
-     *   Parse a Persistent Id and set the protocol, authority, and identifier
-     * 
-     *   Example 1: doi:10.5072/FK2/BYM3IW
-     *       protocol: doi
-     *       authority: 10.5072
-     *       identifier: FK2/BYM3IW
-     * 
-     *   Example 2: hdl:1902.1/111012
-     *       protocol: hdl
-     *       authority: 1902.1
-     *       identifier: 111012
-     *
-     * @param identifierString
-     * @param separator the string that separates the authority from the identifier.
-     * @param destination the global id that will contain the parsed data.
-     * @return {@code destination}, after its fields have been updated, or
-     *         {@code null} if parsing failed.
-     */
-    private boolean parsePersistentId(String identifierString) {
-
-        if (identifierString == null) {
-            return false;
-        }
-        int index1 = identifierString.indexOf(':');
-        if (index1 > 0) { // ':' found with one or more characters before it
-            int index2 = identifierString.indexOf('/', index1 + 1);
-            if (index2 > 0 && (index2 + 1) < identifierString.length()) { // '/' found with one or more characters
-                                                                          // between ':'
-                protocol = identifierString.substring(0, index1); // and '/' and there are characters after '/'
-                if (!"doi".equals(protocol) && !"hdl".equals(protocol)) {
-                    return false;
-                }
-                //Strip any whitespace, ; and ' from authority (should finding them cause a failure instead?)
-                authority = formatIdentifierString(identifierString.substring(index1 + 1, index2));
-                if(testforNullTerminator(authority)) return false;
-                if (protocol.equals(DOI_PROTOCOL)) {
-                    if (!this.checkDOIAuthority(authority)) {
-                        return false;
-                    }
-                }
-                // Passed all checks
-                //Strip any whitespace, ; and ' from identifier (should finding them cause a failure instead?)
-                identifier = formatIdentifierString(identifierString.substring(index2 + 1));
-                if(testforNullTerminator(identifier)) return false;               
-            } else {
-                logger.log(Level.INFO, "Error parsing identifier: {0}: '':<authority>/<identifier>'' not found in string", identifierString);
-                return false;
-            }
-        } else {
-            logger.log(Level.INFO, "Error parsing identifier: {0}: ''<protocol>:'' not found in string", identifierString);
-            return false;
-        }
-        return true;
-    }
-    
-    private static String formatIdentifierString(String str){
-        
-        if (str == null){
-            return null;
-        }
-        // remove whitespace, single quotes, and semicolons
-        return str.replaceAll("\\s+|'|;","");  
-        
-        /*
-        < 	(%3C)
-> 	(%3E)
-{ 	(%7B)
-} 	(%7D)
-^ 	(%5E)
-[ 	(%5B)
-] 	(%5D)
-` 	(%60)
-| 	(%7C)
-\ 	(%5C)
-+
-        */
-        // http://www.doi.org/doi_handbook/2_Numbering.html
-    }
-    
-    private static boolean testforNullTerminator(String str){
-        if(str == null) {
-            return false;
         }
-        return str.indexOf('\u0000') > 0;
-    }
-    
-    private boolean checkDOIAuthority(String doiAuthority){
-        
-        if (doiAuthority==null){
-            return false;
-        }
-        
-        if (!(doiAuthority.startsWith("10."))){
-            return false;
-        }
-        
-        return true;
+        return null;
     }
 
+
+
     /**
      * Verifies that the pid only contains allowed characters.
      *
@@ -257,26 +118,5 @@ public static boolean verifyImportCharacters(String pidParam) {
         return m.matches();
     }
 
-    /**
-     * Convenience method to get the internal form of a PID string when it may be in
-     * the https:// or http:// form ToDo -refactor class to allow creating a
-     * GlobalID from any form (which assures it has valid syntax) and then have methods to get
-     * the form you want.
-     * 
-     * @param pidUrlString - a string assumed to be a valid PID in some form
-     * @return the internal form as a String
-     */
-    public static String getInternalFormOfPID(String pidUrlString) {
-        String pidString = pidUrlString;
-        if(pidUrlString.startsWith(GlobalId.DOI_RESOLVER_URL)) {
-            pidString = pidUrlString.replace(GlobalId.DOI_RESOLVER_URL, (GlobalId.DOI_PROTOCOL + ":"));
-        } else if(pidUrlString.startsWith(GlobalId.HDL_RESOLVER_URL)) {
-            pidString = pidUrlString.replace(GlobalId.HDL_RESOLVER_URL, (GlobalId.HDL_PROTOCOL + ":"));
-        } else if(pidUrlString.startsWith(GlobalId.HTTP_DOI_RESOLVER_URL)) {
-            pidString = pidUrlString.replace(GlobalId.HTTP_DOI_RESOLVER_URL, (GlobalId.DOI_PROTOCOL + ":"));
-        } else if(pidUrlString.startsWith(GlobalId.HTTP_HDL_RESOLVER_URL)) {
-            pidString = pidUrlString.replace(GlobalId.HTTP_HDL_RESOLVER_URL, (GlobalId.HDL_PROTOCOL + ":"));
-        }
-        return pidString;
-    }
+
 }
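To make the reworked GlobalId contract concrete, a minimal sketch with example DOI parts; the separator, resolver prefix, and provider name are plain constructor inputs here (normally supplied by the managing PID provider), not values the class looks up itself:

import edu.harvard.iq.dataverse.GlobalId;

public class GlobalIdSketch {
    public static void main(String[] args) {
        // Example DOI parts; the provider name "DataCite" is an illustrative value.
        GlobalId pid = new GlobalId("doi", "10.5072", "FK2/BYM3IW",
                "/", "https://doi.org/", "DataCite");

        System.out.println(pid.asString()); // doi:10.5072/FK2/BYM3IW
        System.out.println(pid.asURL());    // https://doi.org/10.5072/FK2/BYM3IW
    }
}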
diff --git a/src/main/java/edu/harvard/iq/dataverse/GlobalIdServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GlobalIdServiceBean.java
index 0d64c1050b8..aebf13778c3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GlobalIdServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GlobalIdServiceBean.java
@@ -2,6 +2,8 @@
 
 import static edu.harvard.iq.dataverse.GlobalIdServiceBean.logger;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
 
 import java.util.*;
@@ -13,11 +15,28 @@ public interface GlobalIdServiceBean {
 
     static final Logger logger = Logger.getLogger(GlobalIdServiceBean.class.getCanonicalName());
 
-    boolean alreadyExists(DvObject dvo) throws Exception;
+    boolean alreadyRegistered(DvObject dvo) throws Exception;
+    
+    /**
+     * This call reports whether a PID is registered with the external Provider
+     * service. For providers like DOIs/Handles with an external service, this call
+     * should accurately report whether the PID has been registered in the service.
+     * For providers with no external service, the call should return true if the
+     * PID is defined locally. If it isn't, these no-service providers need to know
+     * whether the caller's use case requires the returned value to default to
+     * true or false - via the noProviderDefault parameter.
+     * 
+     * @param globalId
+     * @param noProviderDefault - when there is no external service, and no local
+     *                          use of the PID, this should be returned
+     * @return whether the PID should be considered registered or not.
+     * @throws Exception
+     */
+    boolean alreadyRegistered(GlobalId globalId, boolean noProviderDefault) throws Exception;
     
-    boolean alreadyExists(GlobalId globalId) throws Exception;
-
     boolean registerWhenPublished();
+    boolean canManagePID();
+    boolean isConfigured();
     
     List<String> getProviderInformation();
 
@@ -25,15 +44,6 @@ public interface GlobalIdServiceBean {
 
     Map<String,String> getIdentifierMetadata(DvObject dvo);
 
-    /**
-     * Concatenate the parts that make up a Global Identifier.
-     * @param protocol the identifier system, e.g. "doi"
-     * @param authority the namespace that the authority manages in the identifier system
-     * @param identifier the local identifier part
-     * @return the Global Identifier, e.g. "doi:10.12345/67890"
-     */
-    String getIdentifierForLookup(String protocol, String authority, String identifier);
-
     String modifyIdentifierTargetURL(DvObject dvo) throws Exception;
 
     void deleteIdentifier(DvObject dvo) throws Exception;
@@ -42,18 +52,27 @@ public interface GlobalIdServiceBean {
     
     Map<String,String> getMetadataForTargetURL(DvObject dvObject);
     
-    Map<String,String> lookupMetadataFromIdentifier(String protocol, String authority, String identifier);
-    
     DvObject generateIdentifier(DvObject dvObject);
     
     String getIdentifier(DvObject dvObject);
     
     boolean publicizeIdentifier(DvObject studyIn);
     
+    String generateDatasetIdentifier(Dataset dataset);
+    String generateDataFileIdentifier(DataFile datafile);
+    boolean isGlobalIdUnique(GlobalId globalId);
+    
+    String getUrlPrefix();
+    String getSeparator();
+    
     static GlobalIdServiceBean getBean(String protocol, CommandContext ctxt) {
         final Function<CommandContext, GlobalIdServiceBean> protocolHandler = BeanDispatcher.DISPATCHER.get(protocol);
         if ( protocolHandler != null ) {
-            return protocolHandler.apply(ctxt);
+            GlobalIdServiceBean theBean = protocolHandler.apply(ctxt);
+            if(theBean != null && theBean.isConfigured()) {
+                logger.fine("getBean returns " + theBean.getProviderInformation().get(0) + " for protocol " + protocol);
+            }
+            return theBean;
         } else {
             logger.log(Level.SEVERE, "Unknown protocol: {0}", protocol);
             return null;
@@ -64,8 +83,113 @@ static GlobalIdServiceBean getBean(CommandContext ctxt) {
         return getBean(ctxt.settings().getValueForKey(Key.Protocol, ""), ctxt);
     }
     
+    public static Optional<GlobalId> parse(String identifierString) {
+        try {
+            return Optional.of(PidUtil.parseAsGlobalID(identifierString));
+        } catch ( IllegalArgumentException _iae) {
+            return Optional.empty();
+        }
+    }
+    
+    /** 
+     *   Parse a Persistent Id and set the protocol, authority, and identifier
+     * 
+     *   Example 1: doi:10.5072/FK2/BYM3IW
+     *       protocol: doi
+     *       authority: 10.5072
+     *       identifier: FK2/BYM3IW
+     * 
+     *   Example 2: hdl:1902.1/111012
+     *       protocol: hdl
+     *       authority: 1902.1
+     *       identifier: 111012
+     *
+     * @param identifierString the persistent id string to be parsed
+     *
+     * @return a {@code GlobalId} holding the parsed protocol, authority, and
+     *         identifier, or {@code null} if the string cannot be parsed by
+     *         this provider.
+     */
+    public GlobalId parsePersistentId(String identifierString);
+    public GlobalId parsePersistentId(String protocol, String authority, String identifier);
+
+    
+    
+    public static boolean isValidGlobalId(String protocol, String authority, String identifier) {
+        if (protocol == null || authority == null || identifier == null) {
+            return false;
+        }
+        if(!authority.equals(GlobalIdServiceBean.formatIdentifierString(authority))) {
+            return false;
+        }
+        if (GlobalIdServiceBean.testforNullTerminator(authority)) {
+            return false;
+        }
+        if(!identifier.equals(GlobalIdServiceBean.formatIdentifierString(identifier))) {
+            return false;
+        }
+        if (GlobalIdServiceBean.testforNullTerminator(identifier)) {
+            return false;
+        }
+        return true;
+    }
+    
+    static String formatIdentifierString(String str){
+        
+        if (str == null){
+            return null;
+        }
+        // remove whitespace, single quotes, and semicolons
+        return str.replaceAll("\\s+|'|;","");  
+        
+        /*
+        <   (%3C)
+>   (%3E)
+{   (%7B)
+}   (%7D)
+^   (%5E)
+[   (%5B)
+]   (%5D)
+`   (%60)
+|   (%7C)
+\   (%5C)
++
+        */
+        // http://www.doi.org/doi_handbook/2_Numbering.html
+    }
+    
+    static boolean testforNullTerminator(String str){
+        if(str == null) {
+            return false;
+        }
+        return str.indexOf('\u0000') > 0;
+    }
+    
+    static boolean checkDOIAuthority(String doiAuthority){
+        
+        if (doiAuthority==null){
+            return false;
+        }
+        
+        if (!(doiAuthority.startsWith("10."))){
+            return false;
+        }
+        
+        return true;
+    }
 }
 
+
+/*
+ * ToDo - replace this with a mechanism like BrandingUtilHelper that would read
+ * the config and create PidProviders, one per set of config values and serve
+ * those as needed. The helper has to be a bean to autostart and to hand the
+ * required service beans to the PidProviders. That may boil down to just the
+ * dvObjectService (to check for local identifier conflicts) since it will be
+ * the helper that has to read settings/get systemConfig values.
+ * 
+ */
+
 /**
  * Static utility class for dispatching implementing beans, based on protocol and providers.
  * @author michael
@@ -86,5 +210,7 @@ class BeanDispatcher {
                     return null;
             }
         });
+        
+        DISPATCHER.put(PermaLinkPidProviderServiceBean.PERMA_PROTOCOL, ctxt->ctxt.permaLinkProvider() );
     }
 }
\ No newline at end of file
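A small sketch of the static validation helpers that moved onto GlobalIdServiceBean: the formatted form strips whitespace, single quotes, and semicolons, so any authority or identifier containing them fails isValidGlobalId. The values below are examples only:

import edu.harvard.iq.dataverse.GlobalIdServiceBean;

public class PidValidationSketch {
    public static void main(String[] args) {
        // A well-formed DOI-style id passes.
        System.out.println(GlobalIdServiceBean.isValidGlobalId("doi", "10.5072", "FK2/BYM3IW")); // true
        // Whitespace (stripped by formatIdentifierString) makes the identifier invalid.
        System.out.println(GlobalIdServiceBean.isValidGlobalId("doi", "10.5072", "FK2/BYM 3IW")); // false
        // Any null part is rejected outright.
        System.out.println(GlobalIdServiceBean.isValidGlobalId("doi", "10.5072", null)); // false
    }
}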
diff --git a/src/main/java/edu/harvard/iq/dataverse/Guestbook.java b/src/main/java/edu/harvard/iq/dataverse/Guestbook.java
index 18913bfd5bf..2ef23d1f925 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Guestbook.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Guestbook.java
@@ -3,27 +3,25 @@
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.Serializable;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToMany;
 import java.util.List;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.ManyToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
 
 import edu.harvard.iq.dataverse.util.DateUtil;
-import org.apache.commons.text.StringEscapeUtils;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java
index 7cbb69e5c1d..f54b1fb6117 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookPage.java
@@ -17,13 +17,13 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -288,19 +288,21 @@ public String save() {
            
         Command<Dataverse> cmd;
         try {
+            // Per recent #dv-tech conversation w/ Jim - copying the code 
+            // below from his QDR branch; the code that used to be here called
+            // UpdateDataverseCommand when saving new guestbooks, and that involved 
+            // an unnecessary reindexing of the dataverse (and, in some cases, 
+            // reindexing of the underlying datasets). - L.A.
             if (editMode == EditMode.CREATE || editMode == EditMode.CLONE ) {
                 guestbook.setCreateTime(new Timestamp(new Date().getTime()));
-                guestbook.setUsageCount(new Long(0));
+                guestbook.setUsageCount(Long.valueOf(0));
                 guestbook.setEnabled(true);
                 dataverse.getGuestbooks().add(guestbook);
-                cmd = new UpdateDataverseCommand(dataverse, null, null, dvRequestService.getDataverseRequest(), null);                
-                commandEngine.submit(cmd);
                 create = true;
-            } else {
-                cmd = new UpdateDataverseGuestbookCommand(dataverse, guestbook, dvRequestService.getDataverseRequest());
-                commandEngine.submit(cmd);
-            }
-
+            } 
+            cmd = new UpdateDataverseGuestbookCommand(dataverse, guestbook, dvRequestService.getDataverseRequest());
+            commandEngine.submit(cmd);
+        
         } catch (EJBException ex) {
             StringBuilder error = new StringBuilder();
             error.append(ex).append(" ");
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
index 69404482fce..9041ccf887c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponse.java
@@ -8,13 +8,15 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
 import java.io.Serializable;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import javax.persistence.*;
-import javax.validation.constraints.Size;
+import jakarta.persistence.*;
+import jakarta.validation.constraints.Size;
 
 /**
  *
@@ -65,8 +67,9 @@ public class GuestbookResponse implements Serializable {
     @JoinColumn(nullable=true)
     private AuthenticatedUser authenticatedUser;
 
-    @OneToOne(cascade=CascadeType.ALL,mappedBy="guestbookResponse",fetch = FetchType.LAZY, optional = false)
-    private FileDownload fileDownload;
+    @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},fetch = FetchType.LAZY)
+    //private FileAccessRequest fileAccessRequest;
+    private List<FileAccessRequest> fileAccessRequests;
      
     @OneToMany(mappedBy="guestbookResponse",cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST},orphanRemoval=true)
     @OrderBy ("id")
@@ -87,16 +90,37 @@ public class GuestbookResponse implements Serializable {
     
     @Temporal(value = TemporalType.TIMESTAMP)
     private Date responseTime;
+
+    private String sessionId;
+    private String eventType;
+
+    /** Event Types - there are four pre-defined values in use.
+     * The type can also be the name of a previewer/explore tool
+     */
     
+    public static final String ACCESS_REQUEST = "AccessRequest";
+    public static final String DOWNLOAD = "Download";
+    static final String SUBSET = "Subset";
+    static final String EXPLORE = "Explore";
+
     /*
     Transient Values carry non-written information 
     that will assist in the download process
     - writeResponse is set to false when dataset version is draft.
+    - selectedFileIds is a comma-delimited list that contains the file ids for a multi-file download
+    - fileFormat tells the download API which format a subsettable file should be downloaded as
+
     */
       
     @Transient 
     private boolean writeResponse = true;
 
+    @Transient
+    private String selectedFileIds;
+    
+    @Transient 
+    private String fileFormat;
+
     /**
      * This transient variable is a place to temporarily retrieve the
      * ExternalTool object from the popup when the popup is required on the
@@ -105,6 +129,7 @@ public class GuestbookResponse implements Serializable {
     @Transient
     private ExternalTool externalTool;
 
+    
     public boolean isWriteResponse() {
         return writeResponse;
     }
@@ -114,19 +139,19 @@ public void setWriteResponse(boolean writeResponse) {
     }
 
     public String getSelectedFileIds(){
-        return this.fileDownload.getSelectedFileIds();
+        return this.selectedFileIds;
     }
     
     public void setSelectedFileIds(String selectedFileIds) {
-        this.fileDownload.setSelectedFileIds(selectedFileIds);
+        this.selectedFileIds = selectedFileIds;
     }
     
     public String getFileFormat() {
-        return this.fileDownload.getFileFormat();
+        return this.fileFormat;
     }
 
     public void setFileFormat(String downloadFormat) {
-        this.fileDownload.setFileFormat(downloadFormat);
+        this.fileFormat = downloadFormat;
     }
     
     public ExternalTool getExternalTool() {
@@ -138,10 +163,6 @@ public void setExternalTool(ExternalTool externalTool) {
     }
 
     public GuestbookResponse(){
-        if(this.getFileDownload() == null){
-            this.fileDownload = new FileDownload();
-            this.fileDownload.setGuestbookResponse(this);
-        }
     }
     
     public GuestbookResponse(GuestbookResponse source){
@@ -154,7 +175,7 @@ public GuestbookResponse(GuestbookResponse source){
         this.setDataset(source.getDataset());
         this.setDatasetVersion(source.getDatasetVersion());
         this.setAuthenticatedUser(source.getAuthenticatedUser());
-   
+        this.setSessionId(source.getSessionId());
         List <CustomQuestionResponse> customQuestionResponses = new ArrayList<>();
         if (!source.getCustomQuestionResponses().isEmpty()){
             for (CustomQuestionResponse customQuestionResponse : source.getCustomQuestionResponses() ){
@@ -167,7 +188,6 @@ public GuestbookResponse(GuestbookResponse source){
         }
         this.setCustomQuestionResponses(customQuestionResponses);
         this.setGuestbook(source.getGuestbook());
-        this.setFileDownload(source.getFileDownload());
     }
     
     
@@ -225,17 +245,11 @@ public Date getResponseTime() {
 
     public void setResponseTime(Date responseTime) {
         this.responseTime = responseTime;
-        this.getFileDownload().setDownloadTimestamp(responseTime);
     }
 
     public String getResponseDate() {
         return new SimpleDateFormat("MMMM d, yyyy").format(responseTime);
     }
-    
-    public String getResponseDateForDisplay(){
-        return null; //    SimpleDateFormat("yyyy").format(new Timestamp(new Date().getTime()));
-    }
-    
 
     public List<CustomQuestionResponse> getCustomQuestionResponses() {
         return customQuestionResponses;
@@ -245,15 +259,14 @@ public void setCustomQuestionResponses(List<CustomQuestionResponse> customQuesti
         this.customQuestionResponses = customQuestionResponses;
     }
     
-    public FileDownload getFileDownload(){
-        return fileDownload;
+    public List<FileAccessRequest> getFileAccessRequests(){
+        return fileAccessRequests;
     }
-    
-    public void setFileDownload(FileDownload fDownload){
-        this.fileDownload = fDownload;
+
+    public void setFileAccessRequest(List<FileAccessRequest> fARs){
+        this.fileAccessRequests = fARs;
     }
     
-    
     public Dataset getDataset() {
         return dataset;
     }
@@ -286,22 +299,55 @@ public void setAuthenticatedUser(AuthenticatedUser authenticatedUser) {
         this.authenticatedUser = authenticatedUser;
     }
     
-    public String getDownloadtype() {
-        return this.fileDownload.getDownloadtype();
+    public String getEventType() {
+        return this.eventType;
     }
 
-    public void setDownloadtype(String downloadtype) {
-        this.fileDownload.setDownloadtype(downloadtype);
+    public void setEventType(String eventType) {
+        this.eventType = eventType;
         
     }
     
     public String getSessionId() {
-        return this.fileDownload.getSessionId();
+        return this.sessionId;
     }
 
     public void setSessionId(String sessionId) {
         
-        this.fileDownload.setSessionId(sessionId);
+        this.sessionId= sessionId;
+    }
+    
+    public String toHtmlFormattedResponse() {
+
+        StringBuilder sb = new StringBuilder();
+
+        sb.append(BundleUtil.getStringFromBundle("dataset.guestbookResponse.id") + ": " + getId() + "<br>\n");
+        sb.append(BundleUtil.getStringFromBundle("dataset.guestbookResponse.date") + ": " + getResponseDate() + "<br>\n");
+        sb.append(BundleUtil.getStringFromBundle("dataset.guestbookResponse.respondent") + "<br><ul style=\"list-style-type:none;\">\n<li>"
+                + BundleUtil.getStringFromBundle("name") + ": " + getName() + "</li>\n<li>");
+        sb.append("  " + BundleUtil.getStringFromBundle("email") + ": " + getEmail() + "</li>\n<li>");
+        sb.append(
+                "  " + BundleUtil.getStringFromBundle("institution") + ": " + wrapNullAnswer(getInstitution()) + "</li>\n<li>");
+        sb.append("  " + BundleUtil.getStringFromBundle("position") + ": " + wrapNullAnswer(getPosition()) + "</li></ul>\n");
+        sb.append(BundleUtil.getStringFromBundle("dataset.guestbookResponse.guestbook.additionalQuestions")
+                + ":<ul style=\"list-style-type:none;\">\n");
+
+        for (CustomQuestionResponse cqr : getCustomQuestionResponses()) {
+            sb.append("<li>" + BundleUtil.getStringFromBundle("dataset.guestbookResponse.question") + ": "
+                    + cqr.getCustomQuestion().getQuestionString() + "<br>"
+                    + BundleUtil.getStringFromBundle("dataset.guestbookResponse.answer") + ": "
+                    + wrapNullAnswer(cqr.getResponse()) + "</li>\n");
+        }
+        sb.append("</ul>");
+        return sb.toString();
+    }
+    
+    private String wrapNullAnswer(String answer) {
+        //This assumes we don't have to distinguish null from when the user actually answers "(No Response)". The db still has the real value.
+        if (answer == null) {
+            return BundleUtil.getStringFromBundle("dataset.guestbookResponse.noResponse");
+        }
+        return answer;
     }
     
     @Override
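With FileDownload folded into the response entity, the session id and event type now live on GuestbookResponse itself. A minimal sketch using only the public constants (the session id is a placeholder value):

import edu.harvard.iq.dataverse.GuestbookResponse;

public class GuestbookEventSketch {
    public static void main(String[] args) {
        // A download event, as the initGuestbookResponse* methods would record it.
        GuestbookResponse download = new GuestbookResponse();
        download.setEventType(GuestbookResponse.DOWNLOAD);
        download.setSessionId("example-session-id"); // placeholder value

        // An access request; these are excluded from download counts.
        GuestbookResponse request = new GuestbookResponse();
        request.setEventType(GuestbookResponse.ACCESS_REQUEST);

        System.out.println(download.getEventType()); // Download
        System.out.println(request.getEventType());  // AccessRequest
    }
}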
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
index f4cf38979c5..b0cc41eb448 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponseServiceBean.java
@@ -12,6 +12,7 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.text.SimpleDateFormat;
+import java.time.LocalDate;
 import java.util.ArrayList;
 import java.util.Calendar;
 import java.util.Date;
@@ -20,17 +21,17 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.faces.model.SelectItem;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.faces.model.SelectItem;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
 import org.apache.commons.text.StringEscapeUtils;
 /**
  *
@@ -62,15 +63,14 @@ public class GuestbookResponseServiceBean {
                 + " and r.dataset_id = o.id "
                 + " and r.guestbook_id = g.id ";*/
     
-    private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, f.downloadtype,"
+    private static final String BASE_QUERY_STRING_FOR_DOWNLOAD_AS_CSV = "select r.id, g.name, o.id, r.responsetime, r.eventtype,"
                 + " m.label, r.dataFile_id, r.name, r.email, r.institution, r.position,"
                 + " o.protocol, o.authority, o.identifier, d.protocol, d.authority, d.identifier "
-                + "from guestbookresponse r, filedownload f, filemetadata m, dvobject o, guestbook g, dvobject d "
+                + "from guestbookresponse r, filemetadata m, dvobject o, guestbook g, dvobject d "
                 + "where "  
                 + "m.datasetversion_id = (select max(datasetversion_id) from filemetadata where datafile_id =r.datafile_id ) "
                 + " and m.datafile_id = r.datafile_id "
                 + " and d.id = r.datafile_id "
-                + " and r.id = f.guestbookresponse_id "
                 + " and r.dataset_id = o.id "
                 + " and r.guestbook_id = g.id ";
     
@@ -78,14 +78,13 @@ public class GuestbookResponseServiceBean {
     // on the guestbook-results.xhtml page (the info we show on the page is 
     // less detailed than what we let the users download as CSV files, so this 
     // query has fewer fields than the one above). -- L.A.
-    private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select  r.id, v.value, r.responsetime, f.downloadtype,  m.label, r.name "
-                + "from guestbookresponse r, filedownload f, datasetfieldvalue v, filemetadata m , dvobject o "
+    private static final String BASE_QUERY_STRING_FOR_PAGE_DISPLAY = "select  r.id, v.value, r.responsetime, r.eventtype,  m.label, r.name "
+                + "from guestbookresponse r, datasetfieldvalue v, filemetadata m , dvobject o "
                 + "where "  
                 + " v.datasetfield_id = (select id from datasetfield f where datasetfieldtype_id = 1 "
                 + " and datasetversion_id = (select max(id) from datasetversion where dataset_id =r.dataset_id )) "
                 + " and m.datasetversion_id = (select max(datasetversion_id) from filemetadata where datafile_id =r.datafile_id ) "
                 + " and m.datafile_id = r.datafile_id "
-                + " and r.id = f.guestbookresponse_id "
                 + " and r.dataset_id = o.id ";
     
     // And a custom query for retrieving *all* the custom question responses, for 
@@ -640,6 +639,9 @@ public GuestbookResponse initGuestbookResponseForFragment(DatasetVersion working
        
         GuestbookResponse guestbookResponse = new GuestbookResponse();
         
+        //Not otherwise set for multi-file downloads
+        guestbookResponse.setDatasetVersion(workingVersion);
+        
         if(workingVersion.isDraft()){           
             guestbookResponse.setWriteResponse(false);
         } 
@@ -666,7 +668,7 @@ public GuestbookResponse initGuestbookResponseForFragment(DatasetVersion working
         if (dataset.getGuestbook() != null && !dataset.getGuestbook().getCustomQuestions().isEmpty()) {
             initCustomQuestions(guestbookResponse, dataset);
         }
-        guestbookResponse.setDownloadtype("Download");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
 
         guestbookResponse.setDataset(dataset);
         
@@ -720,9 +722,9 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String
         if (dataset.getGuestbook() != null && !dataset.getGuestbook().getCustomQuestions().isEmpty()) {
             initCustomQuestions(guestbookResponse, dataset);
         }
-        guestbookResponse.setDownloadtype("Download");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
         if(downloadFormat.toLowerCase().equals("subset")){
-            guestbookResponse.setDownloadtype("Subset");
+            guestbookResponse.setEventType(GuestbookResponse.SUBSET);
         }
         if(downloadFormat.toLowerCase().equals("explore")){
             /**
@@ -733,12 +735,12 @@ public GuestbookResponse initGuestbookResponse(FileMetadata fileMetadata, String
              * "externalTool" for all external tools, including TwoRavens. When
              * clicking "Explore" and then the name of the tool, we want the
              * name of the exploration tool (i.e. "Data Explorer",
-             * etc.) to be persisted as the downloadType. We execute
-             * guestbookResponse.setDownloadtype(externalTool.getDisplayName())
+             * etc.) to be persisted as the eventType. We execute
+             * guestbookResponse.setEventType(externalTool.getDisplayName())
              * over in the "explore" method of FileDownloadServiceBean just
              * before the guestbookResponse is written.
              */
-            guestbookResponse.setDownloadtype("Explore");
+            guestbookResponse.setEventType(GuestbookResponse.EXPLORE);
         }
         guestbookResponse.setDataset(dataset);
         
@@ -817,7 +819,7 @@ public GuestbookResponse initDefaultGuestbookResponse(Dataset dataset, DataFile
         guestbookResponse.setDataset(dataset);
         guestbookResponse.setResponseTime(new Date());
         guestbookResponse.setSessionId(session.toString());
-        guestbookResponse.setDownloadtype("Download");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
         setUserDefaultResponses(guestbookResponse, session);
         return guestbookResponse;
     }
@@ -838,7 +840,7 @@ public GuestbookResponse initAPIGuestbookResponse(Dataset dataset, DataFile data
         guestbookResponse.setDataset(dataset);
         guestbookResponse.setResponseTime(new Date());
         guestbookResponse.setSessionId(session.toString());
-        guestbookResponse.setDownloadtype("Download");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
         setUserDefaultResponses(guestbookResponse, session, user);
         return guestbookResponse;
     }
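
The hunks above replace the free-text setDownloadtype(...) values with named event-type constants on GuestbookResponse. A minimal sketch of the resulting branching, assuming only the constants and setter visible in this diff (the helper name applyEventType is illustrative, not part of the change):

    private void applyEventType(GuestbookResponse gbr, String downloadFormat) {
        if ("subset".equalsIgnoreCase(downloadFormat)) {
            gbr.setEventType(GuestbookResponse.SUBSET);
        } else if ("explore".equalsIgnoreCase(downloadFormat)) {
            // Later overwritten with the external tool's display name, as the Explore hunk above explains
            gbr.setEventType(GuestbookResponse.EXPLORE);
        } else {
            gbr.setEventType(GuestbookResponse.DOWNLOAD);
        }
    }
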
@@ -902,20 +904,36 @@ public void save(GuestbookResponse guestbookResponse) {
         em.persist(guestbookResponse);
     }
     
+    
+    /*
+     * Metrics - download counts from GuestbookResponses: Any GuestbookResponse that
+     * is not of eventtype=='AccessRequest' is considered a download. This includes
+     * actual 'Download's, downloads of 'Subset's, and use by 'Explore' tools and
+     * previewers (where eventtype is the previewer name)
+     */
         
-    public Long getCountGuestbookResponsesByDataFileId(Long dataFileId) {
+    public Long getDownloadCountByDataFileId(Long dataFileId) {
         // datafile id is null, will return 0
-        Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o  where o.datafile_id  = " + dataFileId);
+        Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o  where o.datafile_id  = " + dataFileId + " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST + "'");
         return (Long) query.getSingleResult();
     }
     
-    public Long getCountGuestbookResponsesByDatasetId(Long datasetId) {
+    public Long getDownloadCountByDatasetId(Long datasetId) {
+        return getDownloadCountByDatasetId(datasetId, null);
+    }
+    
+    public Long getDownloadCountByDatasetId(Long datasetId, LocalDate date) {
         // dataset id is null, will return 0        
-        Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o  where o.dataset_id  = " + datasetId);
+        Query query;
+        if (date != null) {
+            query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o  where o.dataset_id  = " + datasetId + " and responsetime < '" + date.toString() + "' and eventtype != '" + GuestbookResponse.ACCESS_REQUEST + "'");
+        } else {
+            query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o  where o.dataset_id  = " + datasetId + " and eventtype != '" + GuestbookResponse.ACCESS_REQUEST + "'");
+        }
         return (Long) query.getSingleResult();
     }    
 
-    public Long getCountOfAllGuestbookResponses() {
+    public Long getTotalDownloadCount() {
         // dataset id is null, will return 0  
         
         // "SELECT COUNT(*)" is notoriously expensive in PostgresQL for large 
@@ -944,10 +962,12 @@ public Long getCountOfAllGuestbookResponses() {
         } catch (IllegalArgumentException iae) {
             // Don't do anything, we'll fall back to using "SELECT COUNT()"
         }
-        Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o;");
+        Query query = em.createNativeQuery("select count(o.id) from GuestbookResponse  o where eventtype != '" + GuestbookResponse.ACCESS_REQUEST +"';");
         return (Long) query.getSingleResult();
     }
     
+    //End Metrics/download counts
+    
     public List<GuestbookResponse> findByAuthenticatedUserId(AuthenticatedUser user) {
         Query query = em.createNamedQuery("GuestbookResponse.findByAuthenticatedUserId"); 
         query.setParameter("authenticatedUserId", user.getId());
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java
index 23aac4a24a3..c53df93def8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java
@@ -6,20 +6,19 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand;
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java
index 5394ddc652a..fcd4e91d455 100644
--- a/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookServiceBean.java
@@ -5,12 +5,11 @@
  */
 package edu.harvard.iq.dataverse;
 
-import java.util.List;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
index df16991b51e..4942db9e7ec 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HandlenetServiceBean.java
@@ -20,15 +20,17 @@
 
 package edu.harvard.iq.dataverse;
 
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
 import java.io.File;
 import java.io.FileInputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 import java.security.PrivateKey;
 
 /* Handlenet imports: */
@@ -64,14 +66,17 @@ public class HandlenetServiceBean extends AbstractGlobalIdServiceBean {
     @EJB
     DataverseServiceBean dataverseService;
     @EJB 
-    SettingsServiceBean settingsService;    
+    SettingsServiceBean settingsService;
     private static final Logger logger = Logger.getLogger(HandlenetServiceBean.class.getCanonicalName());
     
-    private static final String HANDLE_PROTOCOL_TAG = "hdl";
-    int handlenetIndex = System.getProperty("dataverse.handlenet.index")!=null? Integer.parseInt(System.getProperty("dataverse.handlenet.index")) : 300;
+    public static final String HDL_PROTOCOL = "hdl";
+    int handlenetIndex = JvmSettings.HANDLENET_INDEX.lookup(Integer.class);
+    public static final String HTTP_HDL_RESOLVER_URL = "http://hdl.handle.net/";
+    public static final String HDL_RESOLVER_URL = "https://hdl.handle.net/";
     
     public HandlenetServiceBean() {
         logger.log(Level.FINE,"Constructor");
+        configured = true;
     }
 
     @Override
@@ -81,7 +86,7 @@ public boolean registerWhenPublished() {
 
     public void reRegisterHandle(DvObject dvObject) {
         logger.log(Level.FINE,"reRegisterHandle");
-        if (!HANDLE_PROTOCOL_TAG.equals(dvObject.getProtocol())) {
+        if (!HDL_PROTOCOL.equals(dvObject.getProtocol())) {
             logger.log(Level.WARNING, "reRegisterHandle called on a dvObject with the non-handle global id: {0}", dvObject.getId());
         }
         
@@ -226,8 +231,8 @@ private ResolutionRequest buildResolutionRequest(final String handle) {
     private PublicKeyAuthenticationInfo getAuthInfo(String handlePrefix) {
         logger.log(Level.FINE,"getAuthInfo");
         byte[] key = null;
-        String adminCredFile = System.getProperty("dataverse.handlenet.admcredfile");
-        int handlenetIndex = System.getProperty("dataverse.handlenet.index")!=null? Integer.parseInt(System.getProperty("dataverse.handlenet.index")) : 300;
+        String adminCredFile = JvmSettings.HANDLENET_KEY_PATH.lookup();
+        int handlenetIndex = JvmSettings.HANDLENET_INDEX.lookup(Integer.class);
        
         key = readKey(adminCredFile);        
         PrivateKey privkey = null;
@@ -268,13 +273,13 @@ private byte[] readKey(final String file) {
     
     private PrivateKey readPrivKey(byte[] key, final String file) {
         logger.log(Level.FINE,"readPrivKey");
-        PrivateKey privkey=null;
+        PrivateKey privkey = null;
         
-        String secret = System.getProperty("dataverse.handlenet.admprivphrase");
-        byte secKey[] = null;
         try {
+            byte[] secKey = null;
             if ( Util.requiresSecretKey(key) ) {
-                secKey = secret.getBytes();
+                String secret = JvmSettings.HANDLENET_KEY_PASSPHRASE.lookup();
+                secKey = secret.getBytes(StandardCharsets.UTF_8);
             }
             key = Util.decrypt(key, secKey);
             privkey = Util.getPrivateKeyFromBytes(key, 0);
@@ -309,13 +314,13 @@ private String getAuthenticationHandle(String handlePrefix) {
     }
 
     @Override
-    public boolean alreadyExists(DvObject dvObject) throws Exception {
+    public boolean alreadyRegistered(DvObject dvObject) throws Exception {
         String handle = getDvObjectHandle(dvObject);
         return isHandleRegistered(handle);
     }
     
     @Override
-    public boolean alreadyExists(GlobalId pid) throws Exception {
+    public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) throws Exception {
         String handle = pid.getAuthority() + "/" + pid.getIdentifier();
         return isHandleRegistered(handle);
     }
@@ -325,11 +330,6 @@ public Map<String,String> getIdentifierMetadata(DvObject dvObject) {
         throw new NotImplementedException();
     }
 
-    @Override
-    public HashMap lookupMetadataFromIdentifier(String protocol, String authority, String identifier)  {
-        throw new NotImplementedException();
-    }
-
     @Override
     public String modifyIdentifierTargetURL(DvObject dvObject) throws Exception  {
         logger.log(Level.FINE,"modifyIdentifier");
@@ -347,9 +347,9 @@ public String modifyIdentifierTargetURL(DvObject dvObject) throws Exception  {
     public void deleteIdentifier(DvObject dvObject) throws Exception  {
         String handle = getDvObjectHandle(dvObject);
         String authHandle = getAuthenticationHandle(dvObject);
-
-        String adminCredFile = System.getProperty("dataverse.handlenet.admcredfile");
-        int handlenetIndex = System.getProperty("dataverse.handlenet.index")!=null? Integer.parseInt(System.getProperty("dataverse.handlenet.index")) : 300;
+    
+        String adminCredFile = JvmSettings.HANDLENET_KEY_PATH.lookup();
+        int handlenetIndex = JvmSettings.HANDLENET_INDEX.lookup(Integer.class);
        
         byte[] key = readKey(adminCredFile);
         PrivateKey privkey = readPrivKey(key, adminCredFile);
@@ -383,12 +383,7 @@ private boolean updateIdentifierStatus(DvObject dvObject, String statusIn) {
     
     @Override
     public List<String> getProviderInformation(){
-        ArrayList <String> providerInfo = new ArrayList<>();
-        String providerName = "Handle";
-        String providerLink = "https://hdl.handle.net";
-        providerInfo.add(providerName);
-        providerInfo.add(providerLink);
-        return providerInfo;
+        return List.of("Handle", "https://hdl.handle.net");
     }
 
 
@@ -412,7 +407,37 @@ public boolean publicizeIdentifier(DvObject dvObject) {
 
     }
 
-}
+    @Override
+    public GlobalId parsePersistentId(String pidString) {
+        if (pidString.startsWith(HDL_RESOLVER_URL)) {
+            pidString = pidString.replace(HDL_RESOLVER_URL, (HDL_PROTOCOL + ":"));
+        } else if (pidString.startsWith(HTTP_HDL_RESOLVER_URL)) {
+            pidString = pidString.replace(HTTP_HDL_RESOLVER_URL, (HDL_PROTOCOL + ":"));
+        }
+        return super.parsePersistentId(pidString);
+    }
 
+    @Override
+    public GlobalId parsePersistentId(String protocol, String identifierString) {
+        if (!HDL_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        GlobalId globalId = super.parsePersistentId(protocol, identifierString);
+        return globalId;
+    }
+    
+    @Override
+    public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
+        if (!HDL_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        return super.parsePersistentId(protocol, authority, identifier);
+    }
+
+    @Override
+    public String getUrlPrefix() {
+        return HDL_RESOLVER_URL;
+    }
+}
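
The new parsePersistentId overrides accept a handle given either as an hdl: identifier or as a resolver URL and normalize the latter before delegating to the superclass. A standalone sketch of just that normalization step (normalizeHandle is a hypothetical helper used only to show the string handling):

    static String normalizeHandle(String pidString) {
        // e.g. "https://hdl.handle.net/20.500.12345/XYZ" -> "hdl:20.500.12345/XYZ"
        if (pidString.startsWith("https://hdl.handle.net/")) {
            return "hdl:" + pidString.substring("https://hdl.handle.net/".length());
        }
        if (pidString.startsWith("http://hdl.handle.net/")) {
            return "hdl:" + pidString.substring("http://hdl.handle.net/".length());
        }
        return pidString; // already in hdl:<authority>/<identifier> form, or not a handle at all
    }
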
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
index 5be7578f7f8..f008db1403f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingClientsPage.java
@@ -27,17 +27,17 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.model.SelectItem;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.model.SelectItem;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java
index 28df6e19e65..6709b978c47 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingDataverseConfig.java
@@ -6,16 +6,16 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
index 432683a5797..6dbba34920b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HarvestingSetsPage.java
@@ -6,11 +6,6 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
-import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateHarvestingClientCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.UpdateHarvestingClientCommand;
-import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
-import edu.harvard.iq.dataverse.harvest.client.HarvestingClientServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecord;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
@@ -26,15 +21,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java b/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java
index ef9b3267db4..e1864194436 100644
--- a/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/HomepageServlet.java
@@ -7,12 +7,12 @@
 
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.io.IOException;
-import javax.ejb.EJB;
-import javax.servlet.RequestDispatcher;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.servlet.RequestDispatcher;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java b/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java
index 2ecfc55f67e..7d540f0a425 100644
--- a/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/LinkValidator.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.FacesValidator;
-import javax.faces.validator.Validator;
-import javax.faces.validator.ValidatorException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.FacesValidator;
+import jakarta.faces.validator.Validator;
+import jakarta.faces.validator.ValidatorException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 @FacesValidator(value = "linkValidator")
diff --git a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
index 2420ce08550..16d2cc53cb9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/LoginPage.java
@@ -9,30 +9,27 @@
 import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
-import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
-import edu.harvard.iq.dataverse.util.SessionUtil;
 
-import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.event.AjaxBehaviorEvent;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
index 2bfd342d899..72fc6ee6d64 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
@@ -5,12 +5,12 @@
  */
 package edu.harvard.iq.dataverse;
 
-import com.sun.mail.smtp.SMTPSendFailedException;
 import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
+import edu.harvard.iq.dataverse.dataset.DatasetUtil;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -23,25 +23,23 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
-import java.util.Properties;
-import java.util.Map;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.annotation.Resource;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.mail.Address;
-import javax.mail.Message;
-import javax.mail.MessagingException;
-import javax.mail.Session;
-import javax.mail.Transport;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeMessage;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.mail.Address;
+import jakarta.mail.Message;
+import jakarta.mail.MessagingException;
+import jakarta.mail.Session;
+import jakarta.mail.Transport;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
+import jakarta.mail.internet.MimeMessage;
 
 import edu.harvard.iq.dataverse.validation.EMailValidator;
+import jakarta.json.JsonObject;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -81,37 +79,6 @@ public class MailServiceBean implements java.io.Serializable {
     public MailServiceBean() {
     }
 
-    public void sendMail(String host, String reply, String to, String subject, String messageText) {
-        Properties props = System.getProperties();
-        props.put("mail.smtp.host", host);
-        Session session = Session.getDefaultInstance(props, null);
-
-        try {
-            MimeMessage msg = new MimeMessage(session);
-            String[] recipientStrings = to.split(",");
-            InternetAddress[] recipients = new InternetAddress[recipientStrings.length];
-            try {
-            	InternetAddress fromAddress = getSystemAddress();
-                setContactDelegation(reply, fromAddress);
-                msg.setFrom(fromAddress);
-                msg.setReplyTo(new Address[] {new InternetAddress(reply, charset)});
-                for (int i = 0; i < recipients.length; i++) {
-                    recipients[i] = new InternetAddress(recipientStrings[i], "", charset);
-                }
-            } catch (UnsupportedEncodingException ex) {
-                logger.severe(ex.getMessage());
-            }
-            msg.setRecipients(Message.RecipientType.TO, recipients);
-            msg.setSubject(subject, charset);
-            msg.setText(messageText, charset);
-            Transport.send(msg, recipients);
-        } catch (AddressException ae) {
-            ae.printStackTrace(System.out);
-        } catch (MessagingException me) {
-            me.printStackTrace(System.out);
-        }
-    }
-
     @Resource(name = "mail/notifyMailSession")
     private Session session;
 
@@ -154,9 +121,9 @@ public boolean sendSystemEmail(String to, String subject, String messageText, bo
                 try {
                     Transport.send(msg, recipients);
                     sent = true;
-                } catch (SMTPSendFailedException ssfe) {
+                } catch (MessagingException ssfe) {
                     logger.warning("Failed to send mail to: " + to);
-                    logger.warning("SMTPSendFailedException Message: " + ssfe);
+                    logger.warning("MessagingException Message: " + ssfe);
                 }
             } else {
                 logger.fine("Skipping sending mail to " + to + ", because the \"no-reply\" address not set (" + Key.SystemEmail + " setting).");
@@ -177,11 +144,7 @@ public InternetAddress getSystemAddress() {
     }
 
     //@Resource(name="mail/notifyMailSession")
-    public void sendMail(String from, String to, String subject, String messageText) {
-        sendMail(from, to, subject, messageText, new HashMap<>());
-    }
-
-    public void sendMail(String reply, String to, String subject, String messageText, Map<Object, Object> extraHeaders) {
+    public void sendMail(String reply, String to, String cc, String subject, String messageText) {
         try {
             MimeMessage msg = new MimeMessage(session);
             // Always send from system address to avoid email being blocked
@@ -202,18 +165,12 @@ public void sendMail(String reply, String to, String subject, String messageText
             msg.setSentDate(new Date());
             msg.setRecipients(Message.RecipientType.TO,
                     InternetAddress.parse(to, false));
+            if (cc != null) {
+                msg.setRecipients(Message.RecipientType.CC, InternetAddress.parse(cc, false));
+            }
             msg.setSubject(subject, charset);
             msg.setText(messageText, charset);
 
-            if (extraHeaders != null) {
-                for (Object key : extraHeaders.keySet()) {
-                    String headerName = key.toString();
-                    String headerValue = extraHeaders.get(key).toString();
-
-                    msg.addHeader(headerName, headerValue);
-                }
-            }
-
             Transport.send(msg);
         } catch (AddressException ae) {
             ae.printStackTrace(System.out);
@@ -283,11 +240,11 @@ private String getDatasetManageFileAccessLink(DataFile datafile){
     } 
 
     private String getDatasetLink(Dataset dataset){        
-        return  systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString();
+        return  systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString();
     } 
 
     private String getDatasetDraftLink(Dataset dataset){        
-        return  systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + dataset.getGlobalIdString() + "&version=DRAFT" + "&faces-redirect=true"; 
+        return  systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + dataset.getGlobalId().asString() + "&version=DRAFT" + "&faces-redirect=true"; 
     } 
 
     private String getDataverseLink(Dataverse dataverse){       
@@ -425,12 +382,21 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 logger.fine(dataverseCreatedMessage);
                 return messageText += dataverseCreatedMessage;
             case REQUESTFILEACCESS:
+                //Notification to those who can grant file access requests on a dataset when a user makes a request
                 DataFile datafile = (DataFile) targetObject;
+                
                 pattern = BundleUtil.getStringFromBundle("notification.email.requestFileAccess");
                 String requestorName = (requestor.getFirstName() != null && requestor.getLastName() != null) ? requestor.getFirstName() + " " + requestor.getLastName() : BundleUtil.getStringFromBundle("notification.email.info.unavailable");
                 String requestorEmail = requestor.getEmail() != null ? requestor.getEmail() : BundleUtil.getStringFromBundle("notification.email.info.unavailable"); 
                 String[] paramArrayRequestFileAccess = {datafile.getOwner().getDisplayName(), requestorName, requestorEmail, getDatasetManageFileAccessLink(datafile)};
+                messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 messageText += MessageFormat.format(pattern, paramArrayRequestFileAccess);
+                FileAccessRequest far = datafile.getAccessRequestForAssignee(requestor);
+                GuestbookResponse gbr = far.getGuestbookResponse();
+                if (gbr != null) {
+                    messageText += MessageFormat.format(
+                            BundleUtil.getStringFromBundle("notification.email.requestFileAccess.guestbookResponse"), gbr.toHtmlFormattedResponse());
+                }
                 return messageText;
             case GRANTFILEACCESS:
                 dataset = (Dataset) targetObject;
@@ -535,7 +501,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
             case STATUSUPDATED:
                 version =  (DatasetVersion) targetObject;
                 pattern = BundleUtil.getStringFromBundle("notification.email.status.change");
-                String[] paramArrayStatus = {version.getDataset().getDisplayName(), (version.getExternalStatusLabel()==null) ? "<none>" : version.getExternalStatusLabel()};
+                String[] paramArrayStatus = {version.getDataset().getDisplayName(), (version.getExternalStatusLabel()==null) ? "<none>" : DatasetUtil.getLocaleExternalStatus(version.getExternalStatusLabel())};
                 messageText += MessageFormat.format(pattern, paramArrayStatus);
                 return messageText;
             case CREATEACC:
@@ -555,7 +521,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
             case CHECKSUMFAIL:
                 dataset =  (Dataset) targetObject;
                 String checksumFailMsg = BundleUtil.getStringFromBundle("notification.checksumfail", Arrays.asList(
-                        dataset.getGlobalIdString()
+                        dataset.getGlobalId().asString()
                 ));
                 logger.fine("checksumFailMsg: " + checksumFailMsg);
                 return messageText += checksumFailMsg;
@@ -564,7 +530,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 version =  (DatasetVersion) targetObject;
                 String fileImportMsg = BundleUtil.getStringFromBundle("notification.mail.import.filesystem", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        version.getDataset().getGlobalIdString(),
+                        version.getDataset().getGlobalId().asString(),
                         version.getDataset().getDisplayName()
                 ));
                 logger.fine("fileImportMsg: " + fileImportMsg);
@@ -575,7 +541,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 String uploadCompletedMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.upload.completed", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        dataset.getGlobalIdString(),
+                        dataset.getGlobalId().asString(),
                         dataset.getDisplayName(),
                         comment
                 ))  ;
@@ -586,7 +552,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 String downloadCompletedMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.download.completed", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        dataset.getGlobalIdString(),
+                        dataset.getGlobalId().asString(),
                         dataset.getDisplayName(),
                         comment
                 ))  ;
@@ -596,7 +562,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 String uploadCompletedWithErrorsMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.upload.completedWithErrors", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        dataset.getGlobalIdString(),
+                        dataset.getGlobalId().asString(),
                         dataset.getDisplayName(),
                         comment
                 ))  ;
@@ -607,7 +573,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 String downloadCompletedWithErrorsMessage = messageText + BundleUtil.getStringFromBundle("notification.mail.globus.download.completedWithErrors", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        dataset.getGlobalIdString(),
+                        dataset.getGlobalId().asString(),
                         dataset.getDisplayName(),
                         comment
                 ))  ;
@@ -616,7 +582,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
             case CHECKSUMIMPORT:
                 version =  (DatasetVersion) targetObject;
                 String checksumImportMsg = BundleUtil.getStringFromBundle("notification.import.checksum", Arrays.asList(
-                        version.getDataset().getGlobalIdString(),
+                        version.getDataset().getGlobalId().asString(),
                         version.getDataset().getDisplayName()
                 ));
                 logger.fine("checksumImportMsg: " + checksumImportMsg);
@@ -632,7 +598,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 String ingestedCompletedMessage = messageText + BundleUtil.getStringFromBundle("notification.ingest.completed", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        dataset.getGlobalIdString(),
+                        dataset.getGlobalId().asString(),
                         dataset.getDisplayName(),
                         systemConfig.getGuidesBaseUrl(),
                         systemConfig.getGuidesVersion(),
@@ -645,7 +611,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                 messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
                 String ingestedCompletedWithErrorsMessage = messageText + BundleUtil.getStringFromBundle("notification.ingest.completedwitherrors", Arrays.asList(
                         systemConfig.getDataverseSiteUrl(),
-                        dataset.getGlobalIdString(),
+                        dataset.getGlobalId().asString(),
                         dataset.getDisplayName(),
                         systemConfig.getGuidesBaseUrl(),
                         systemConfig.getGuidesVersion(),
@@ -656,7 +622,7 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
             case DATASETMENTIONED:
                 String additionalInfo = userNotification.getAdditionalInfo();
                 dataset = (Dataset) targetObject;
-                javax.json.JsonObject citingResource = null;
+                JsonObject citingResource = null;
                 citingResource = JsonUtil.getJsonObject(additionalInfo);
                 
 
@@ -673,6 +639,20 @@ public String getMessageTextBasedOnNotification(UserNotification userNotificatio
                         dataset.getDisplayName()};
                 messageText = MessageFormat.format(pattern, paramArrayDatasetMentioned);
                 return messageText;
+            case REQUESTEDFILEACCESS:
+                //Notification to requestor when they make a request
+                datafile = (DataFile) targetObject;
+                
+                pattern = BundleUtil.getStringFromBundle("notification.email.requestedFileAccess");
+                messageText = BundleUtil.getStringFromBundle("notification.email.greeting.html");
+                messageText += MessageFormat.format(pattern, getDvObjectLink(datafile), datafile.getOwner().getDisplayName());
+                far = datafile.getAccessRequestForAssignee(requestor);
+                gbr = far.getGuestbookResponse();
+                if (gbr != null) {
+                    messageText += MessageFormat.format(
+                            BundleUtil.getStringFromBundle("notification.email.requestFileAccess.guestbookResponse"), gbr.toHtmlFormattedResponse());
+                }
+                return messageText;
         }
 
         return "";
@@ -693,6 +673,7 @@ public Object getObjectOfNotification (UserNotification userNotification){
             case CREATEDV:
                 return dataverseService.find(userNotification.getObjectId());
             case REQUESTFILEACCESS:
+            case REQUESTEDFILEACCESS:
                 return dataFileService.find(userNotification.getObjectId());
             case GRANTFILEACCESS:
             case REJECTFILEACCESS:
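
The reworked sendMail above drops the extra-headers map and instead takes an optional CC list alongside the reply-to address. A minimal usage sketch, with placeholder addresses and an injected MailServiceBean assumed:

    // sendMail(reply, to, cc, subject, messageText); cc may be null when no copy is needed
    mailService.sendMail("curator@example.edu",
            "depositor@example.edu",
            null,
            "Your file access request",
            "Your request has been received and will be reviewed by the dataset administrators.");
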
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
index 09f067f772c..ca2f6145cba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
+import edu.harvard.iq.dataverse.api.Util;
 import edu.harvard.iq.dataverse.authorization.AuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
-import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -20,24 +20,24 @@
 import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.DateUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import static edu.harvard.iq.dataverse.util.JsfHelper.JH;
 import java.sql.Timestamp;
 import java.util.*;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import org.apache.commons.lang3.StringUtils;
-import org.primefaces.event.SelectEvent;
-import org.primefaces.event.ToggleSelectEvent;
-import org.primefaces.event.UnselectEvent;
+import java.util.stream.Collectors;
+
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import org.apache.commons.lang3.ObjectUtils;
 
 /**
  *
@@ -73,6 +73,8 @@ public class ManageFilePermissionsPage implements java.io.Serializable {
     DataverseRequestServiceBean dvRequestService;
     @Inject
     PermissionsWrapper permissionsWrapper;
+    @EJB
+    FileAccessRequestServiceBean fileAccessRequestService;
     
     @PersistenceContext(unitName = "VDCNet-ejbPU")
     EntityManager em;
@@ -83,7 +85,21 @@ public class ManageFilePermissionsPage implements java.io.Serializable {
     Dataset dataset = new Dataset(); 
     private final TreeMap<RoleAssignee,List<RoleAssignmentRow>> roleAssigneeMap = new TreeMap<>();
     private final TreeMap<DataFile,List<RoleAssignmentRow>> fileMap = new TreeMap<>();
-    private final TreeMap<AuthenticatedUser,List<DataFile>> fileAccessRequestMap = new TreeMap<>();  
+
+    public TreeMap<AuthenticatedUser, List<FileAccessRequest>> getFileAccessRequestMap() {
+        return fileAccessRequestMap;
+    }
+    
+    public List<DataFile> getDataFilesForRequestor() {
+        List<FileAccessRequest> fars = fileAccessRequestMap.get(getFileRequester());
+        if (fars == null) {
+            return new ArrayList<>();
+        } else {
+            return fars.stream().map(FileAccessRequest::getDataFile).collect(Collectors.toList());
+        }
+    }
+
+    private final TreeMap<AuthenticatedUser,List<FileAccessRequest>> fileAccessRequestMap = new TreeMap<>();
     private boolean showDeleted = true;
 
     public boolean isShowDeleted() {
@@ -110,11 +126,6 @@ public TreeMap<DataFile, List<RoleAssignmentRow>> getFileMap() {
         return fileMap;
     }
 
-    public TreeMap<AuthenticatedUser, List<DataFile>> getFileAccessRequestMap() {
-        return fileAccessRequestMap;
-    }
-    
-    
     private boolean backingShowDeleted = true;
 
     public void showDeletedCheckboxChange() {
@@ -125,7 +136,7 @@ public void showDeletedCheckboxChange() {
         }
 
     }
-    
+
     public String init() {
         if (dataset.getId() != null) {
             dataset = datasetService.find(dataset.getId());
@@ -142,17 +153,17 @@ public String init() {
         initMaps();
         return "";
     }
-    
+
     private void initMaps() {
         // initialize files and usergroup list
         roleAssigneeMap.clear();
         fileMap.clear();
-        fileAccessRequestMap.clear();   
-               
+        fileAccessRequestMap.clear();
+
         for (DataFile file : dataset.getFiles()) {
-            
+
             // only include if the file is restricted (or its draft version is restricted)
-            //Added a null check in case there are files that have no metadata records SEK 
+            //Added a null check in case there are files that have no metadata records SEK
             //for 6587 make sure that a file is in the current version before adding to the fileMap SEK 2/11/2020
                 if (file.getFileMetadata() != null && (file.isRestricted() || file.getFileMetadata().isRestricted())) {
                     //only test if file is deleted if it's restricted
@@ -169,35 +180,67 @@ private void initMaps() {
                 for (RoleAssignment ra : ras) {
                     // for files, only show role assignments which can download
                     if (ra.getRole().permissions().contains(Permission.DownloadFile)) {
-                        raList.add(new RoleAssignmentRow(ra, roleAssigneeService.getRoleAssignee(ra.getAssigneeIdentifier(), true).getDisplayInfo(), fileIsDeleted));                   
-                        addFileToRoleAssignee(ra, fileIsDeleted);                    
+                        raList.add(new RoleAssignmentRow(ra, roleAssigneeService.getRoleAssignee(ra.getAssigneeIdentifier(), true).getDisplayInfo(), fileIsDeleted));
+                        addFileToRoleAssignee(ra, fileIsDeleted);
                     }
                 }
-                
+
                 file.setDeleted(fileIsDeleted);
-                
+
                 fileMap.put(file, raList);
-                
+
                 // populate the file access requests map
-                for (AuthenticatedUser au : file.getFileAccessRequesters()) {
-                        List<DataFile> requestedFiles = fileAccessRequestMap.get(au);
-                        if (requestedFiles == null) {
-                            requestedFiles = new ArrayList<>();
-                            AuthenticatedUser withProvider = authenticationService.getAuthenticatedUserWithProvider(au.getUserIdentifier());                           
-                            fileAccessRequestMap.put(withProvider, requestedFiles);
-                        }
-                        requestedFiles.add(file);                                       
+                for (FileAccessRequest fileAccessRequest : file.getFileAccessRequests(FileAccessRequest.RequestState.CREATED)) {
+                    List<FileAccessRequest> fileAccessRequestList = fileAccessRequestMap.get(fileAccessRequest.getRequester());
+                    if (fileAccessRequestList == null) {
+                        fileAccessRequestList = new ArrayList<>();
+                        AuthenticatedUser withProvider = authenticationService.getAuthenticatedUserWithProvider(fileAccessRequest.getRequester().getUserIdentifier());
+                        fileAccessRequestMap.put(withProvider, fileAccessRequestList);
+                    }
+                    fileAccessRequestList.add(fileAccessRequest);
                 }
-            }  
+            }
         }
-        
     }
-    
+
     public String getAuthProviderFriendlyName(String authProviderId){
-        
         return AuthenticationProvider.getFriendlyName(authProviderId);
     }
-    
+
+    Date getAccessRequestDate(List<FileAccessRequest> fileAccessRequests){
+        if (fileAccessRequests == null) {
+            return null;
+        }
+
+        // find the oldest creation time among the given access requests and return it, or null if none is available
+        return fileAccessRequests.stream()
+            .filter(fileAccessRequest -> fileAccessRequest.getCreationTime() != null)
+            .min((a, b) -> ObjectUtils.compare(a.getCreationTime(), b.getCreationTime(), true))
+            .map(FileAccessRequest::getCreationTime)
+            .orElse(null);
+    }
+
+    public String formatAccessRequestDate(List<FileAccessRequest> fileAccessRequests){
+        Date date = getAccessRequestDate(fileAccessRequests);
+
+        if (date == null) {
+            return null;
+        }
+
+        return DateUtil.formatDate(date);
+    }
+
+
+    public String formatAccessRequestTimestamp(List<FileAccessRequest> fileAccessRequests){
+        Date date = getAccessRequestDate(fileAccessRequests);
+
+        if (date == null) {
+            return null;
+        }
+
+        return Util.getDateTimeFormat().format(date);
+    }
+
     private void addFileToRoleAssignee(RoleAssignment assignment, boolean fileDeleted) {
         RoleAssignee ra = roleAssigneeService.getRoleAssignee(assignment.getAssigneeIdentifier());
         List<RoleAssignmentRow> assignments = roleAssigneeMap.get(ra);
@@ -354,7 +397,10 @@ public void initAssignDialogForFileRequester(AuthenticatedUser au) {
         fileRequester = au;
         selectedRoleAssignees = null;
         selectedFiles.clear();
-        selectedFiles.addAll(fileAccessRequestMap.get(au));    
+
+        for (FileAccessRequest fileAccessRequest : fileAccessRequestMap.get(au)) {
+            selectedFiles.add(fileAccessRequest.getDataFile());
+        }
         showUserGroupMessages();
     }     
     
@@ -373,12 +419,14 @@ public void grantAccess(ActionEvent evt) {
                     if (file.isReleased()) {
                         sendNotification = true;
                     }
-                    // remove request, if it exist
-                    if (file.getFileAccessRequesters().remove(roleAssignee)) {
-                        datafileService.save(file);
-                    }                  
-                }               
-            
+                    // set request(s) granted, if they exist
+                    for (AuthenticatedUser au : roleAssigneeService.getExplicitUsers(roleAssignee)) {
+                        FileAccessRequest far = file.getAccessRequestForAssignee(au);
+                        if (far != null) {
+                            far.setStateGranted();
+                        }
+                    }
+                    datafileService.save(file);
+                }
+
             }
 
             if (sendNotification) {
@@ -387,7 +435,7 @@ public void grantAccess(ActionEvent evt) {
                 }
              }
         }
-        
+
         initMaps();
     }
     
@@ -396,23 +444,33 @@ public void grantAccessToRequests(AuthenticatedUser au) {
     }
     
     public void grantAccessToAllRequests(AuthenticatedUser au) {
-        grantAccessToRequests(au, fileAccessRequestMap.get(au));
-    }    
+        List<DataFile> files = new ArrayList<>();
+
+        for (FileAccessRequest fileAccessRequest : fileAccessRequestMap.get(au)) {
+            files.add(fileAccessRequest.getDataFile());
+        }
+
+        grantAccessToRequests(au, files);
+    }
 
     private void grantAccessToRequests(AuthenticatedUser au, List<DataFile> files) {
         boolean actionPerformed = false;
         // Find the built in file downloader role (currently by alias) 
         DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER);
         for (DataFile file : files) {
-            if (assignRole(au, file, fileDownloaderRole)) {                
-                file.getFileAccessRequesters().remove(au);
-                datafileService.save(file);
+            if (assignRole(au, file, fileDownloaderRole)) {
+                FileAccessRequest far = file.getAccessRequestForAssignee(au);
+                if (far!=null) {
+                    far.setStateGranted();
+                    datafileService.save(file);
+                }
                 actionPerformed = true;
             }
         }
+
         if (actionPerformed) {
             JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("permission.fileAccessGranted", Arrays.asList(au.getDisplayInfo().getTitle())));
-            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId()); 
+            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId());
             initMaps();
         }
 
@@ -423,24 +481,34 @@ public void rejectAccessToRequests(AuthenticatedUser au) {
     }
     
     public void rejectAccessToAllRequests(AuthenticatedUser au) {
-        rejectAccessToRequests(au, fileAccessRequestMap.get(au));
-    }    
+        List<DataFile> files = new ArrayList<>();
+
+        for (FileAccessRequest fileAccessRequest : fileAccessRequestMap.get(au)) {
+            files.add(fileAccessRequest.getDataFile());
+        }
+
+        rejectAccessToRequests(au, files);
+    }
 
     private void rejectAccessToRequests(AuthenticatedUser au, List<DataFile> files) {
-        boolean actionPerformed = false;        
-        for (DataFile file : files) {               
-            file.getFileAccessRequesters().remove(au);
-            datafileService.save(file);
-            actionPerformed = true;
+        boolean actionPerformed = false;
+        for (DataFile file : files) {
+            FileAccessRequest far = file.getAccessRequestForAssignee(au);
+            if(far!=null) {
+                far.setStateRejected();
+                fileAccessRequestService.save(far);
+                file.removeFileAccessRequest(far);
+                datafileService.save(file);
+                actionPerformed = true;
+            }
         }
 
-        
         if (actionPerformed) {
             JsfHelper.addSuccessMessage(BundleUtil.getStringFromBundle("permission.fileAccessRejected", Arrays.asList(au.getDisplayInfo().getTitle())));
-            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REJECTFILEACCESS, dataset.getId());        
+            userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REJECTFILEACCESS, dataset.getId());
             initMaps();
         }
-    }    
+    }
 
     private boolean assignRole(RoleAssignee ra,  DataFile file, DataverseRole r) {
         try {
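
The rewritten initMaps() above collects pending FileAccessRequests (state CREATED) per requester into a TreeMap by hand. A compact sketch of the same grouping with streams, assuming only the accessors shown in this diff and omitting the restricted-file filter and the provider lookup (needs java.util.Map, java.util.List and java.util.stream.Collectors):

    Map<AuthenticatedUser, List<FileAccessRequest>> pendingByRequester =
            dataset.getFiles().stream()
                    .flatMap(f -> f.getFileAccessRequests(FileAccessRequest.RequestState.CREATED).stream())
                    .collect(Collectors.groupingBy(FileAccessRequest::getRequester));
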
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
index 8513ca33b47..583e195ab0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
@@ -22,17 +22,17 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.lang3.StringUtils;
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java
index 7db0ecc0767..cc89cfd9d56 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java
@@ -11,17 +11,19 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.AbortProcessingException;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.event.AjaxBehaviorEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.ServletOutputStream;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
@@ -325,7 +327,7 @@ public void setDisplayDownloadAll(boolean displayDownloadAll) {
         this.displayDownloadAll = displayDownloadAll;
     }
 
-    public String updateGuestbooksRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException {
+    public String updateGuestbooksRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
         try {
             dataverse = engineService.submit(
                     new UpdateDataverseGuestbookRootCommand(!isInheritGuestbooksValue(),
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
index e71e04bc42f..0e277c5aa32 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
@@ -29,18 +29,17 @@
 import java.util.Date;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.ResourceBundle;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.text.StringEscapeUtils;
 
 /**
@@ -56,6 +55,8 @@ public class ManagePermissionsPage implements java.io.Serializable {
     @EJB
     DvObjectServiceBean dvObjectService;
     @EJB
+    FileAccessRequestServiceBean fileAccessRequestService;
+    @EJB
     DataverseRoleServiceBean roleService;
     @EJB
     RoleAssigneeServiceBean roleAssigneeService;
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
index 4578a01e693..98369a2eab3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageTemplatesPage.java
@@ -14,15 +14,17 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import jakarta.faces.event.AbortProcessingException;
+import jakarta.faces.event.AjaxBehaviorEvent;
 /**
  *
  * @author skraffmiller
@@ -60,6 +62,9 @@ public class ManageTemplatesPage implements java.io.Serializable {
     
     @Inject
     LicenseServiceBean licenseServiceBean;
+    
+    @Inject
+    SettingsWrapper settingsWrapper;
 
     private List<Template> templates;
     private Dataverse dataverse;
@@ -230,11 +235,11 @@ public void setSelectedTemplate(Template selectedTemplate) {
 
     public void viewSelectedTemplate(Template selectedTemplate) {
         this.selectedTemplate = selectedTemplate;
-        this.selectedTemplate.setMetadataValueBlocks();
+        this.selectedTemplate.setMetadataValueBlocks(settingsWrapper.getSystemMetadataBlocks());
         tempPage.setTemplate(selectedTemplate);
     }
 
-    public String updateTemplatesRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException {
+    public String updateTemplatesRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
         try {
             if (dataverse.getOwner() != null) {
                 if (isInheritTemplatesValue() && dataverse.getDefaultTemplate() == null && dataverse.getOwner().getDefaultTemplate() != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
index 33e75efffb5..0fd7c2efbc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlock.java
@@ -8,21 +8,21 @@
 import java.util.List;
 import java.util.MissingResourceException;
 import java.util.Objects;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java
index c5bd48ae785..49c50e82efb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockConverter.java
@@ -5,13 +5,13 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
index a25480102f2..bb6daa264ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MetadataBlockServiceBean.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse;
 
 import java.util.List;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
@@ -18,8 +18,6 @@ public class MetadataBlockServiceBean {
     @PersistenceContext(unitName = "VDCNet-ejbPU")
     private EntityManager em;
     
-    
-    
     public MetadataBlock save(MetadataBlock mdb) {
        return em.merge(mdb);
     }   
diff --git a/src/main/java/edu/harvard/iq/dataverse/Metric.java b/src/main/java/edu/harvard/iq/dataverse/Metric.java
index 5526604f77c..0e71ab44db4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Metric.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Metric.java
@@ -5,21 +5,20 @@
  */
 package edu.harvard.iq.dataverse;
 
-import java.io.IOException;
 import java.io.Serializable;
 import java.sql.Timestamp;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
index 37a11396f37..832d7ec19ef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/NavigationWrapper.java
@@ -14,12 +14,12 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -96,7 +96,8 @@ private String sendError(int errorCode) {
         try {
             context.getExternalContext().responseSendError(errorCode,null);
         } catch (IOException ex) {
-            Logger.getLogger(PermissionsWrapper.class.getName()).log(Level.SEVERE, null, ex);
+            //Logger.getLogger(PermissionsWrapper.class.getName()).log(Level.SEVERE, null, ex);
+            Logger.getLogger(NavigationWrapper.class.getName()).fine("Caught exception in sendError(): "+ex.getMessage());
         }
         context.responseComplete();
         return "";
diff --git a/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java
index fac2abeddb8..0c5218fb927 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PackagePopupFragmentBean.java
@@ -6,8 +6,8 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index 8f7f53de1a2..a1de33a764e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
@@ -17,14 +16,14 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.util.HashSet;
 import java.util.List;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import static edu.harvard.iq.dataverse.engine.command.CommandHelper.CH;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
@@ -41,7 +40,7 @@
 import java.util.logging.Level;
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.toList;
-import javax.persistence.Query;
+import jakarta.persistence.Query;
 
 /**
  * Your one-stop-shop for deciding which user can do what action on which
@@ -750,6 +749,14 @@ else if (dataset.isLockedFor(DatasetLock.Reason.InReview)) {
             }
         }
     }
+
+    public void checkUpdateDatasetVersionLock(Dataset dataset, DataverseRequest dataverseRequest, Command<?> command) throws IllegalCommandException {
+        boolean hasAtLeastOneLockThatIsNotAnIngestLock = dataset.isLocked() && dataset.getLocks().stream()
+            .anyMatch(lock -> !DatasetLock.Reason.Ingest.equals(lock.getReason()));
+        if (hasAtLeastOneLockThatIsNotAnIngestLock) {
+            checkEditDatasetLock(dataset, dataverseRequest, command);
+        }
+    }
     
     public void checkPublishDatasetLock(Dataset dataset, DataverseRequest dataverseRequest, Command command) throws IllegalCommandException {
         if (dataset.isLocked()) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
index 4ee45fc85a1..5ce9edb3a9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
@@ -14,10 +14,10 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java
index a5e4cebbd95..0d863f47324 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeConverter.java
@@ -7,13 +7,13 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
 
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
index c6f2b7f28a5..88acc1916cf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
@@ -11,6 +11,7 @@
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mydata.MyDataFilterParams;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
@@ -21,12 +22,12 @@
 import java.util.TreeMap;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -96,18 +97,18 @@ public RoleAssignee getRoleAssignee(String identifier, Boolean augmented) {
         if (identifier == null || identifier.isEmpty()) {
             throw new IllegalArgumentException("Identifier cannot be null or empty string.");
         }
-        switch (identifier.charAt(0)) {
-            case ':':
+        switch (identifier.substring(0,1)) {
+            case ":":
                 return predefinedRoleAssignees.get(identifier);
-            case '@':
+            case AuthenticatedUser.IDENTIFIER_PREFIX:
                 if (!augmented){
                     return authSvc.getAuthenticatedUser(identifier.substring(1));
                 } else {
                     return authSvc.getAuthenticatedUserWithProvider(identifier.substring(1));
-                }                
-            case '&':
+                }
+            case Group.IDENTIFIER_PREFIX:
                 return groupSvc.getGroup(identifier.substring(1));
-            case '#':
+            case PrivateUrlUser.PREFIX:
                 return PrivateUrlUtil.identifier2roleAssignee(identifier);
             default:
                 throw new IllegalArgumentException("Unsupported assignee identifier '" + identifier + "'");
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
index f053a449da4..df004fe1357 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssignment.java
@@ -3,19 +3,19 @@
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import java.util.Objects;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 /**
  * A role of a user in a Dataverse. A User may have many roles in a given Dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
index dd3044d3749..1bd337452c2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RolePermissionFragment.java
@@ -26,16 +26,18 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import jakarta.faces.event.AbortProcessingException;
+import jakarta.faces.event.AjaxBehaviorEvent;
 import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 
@@ -92,7 +94,7 @@ public void setInheritAssignments(boolean inheritAssignments) {
         this.inheritAssignments = inheritAssignments;
     }
 
-    public void updatePermissionRoot(javax.faces.event.AjaxBehaviorEvent event) throws javax.faces.event.AbortProcessingException {
+    public void updatePermissionRoot(AjaxBehaviorEvent event) throws AbortProcessingException {
         try {
             dvObject = commandEngine.submit(
                     new UpdatePermissionRootCommand(!inheritAssignments, 
diff --git a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
index 054ed61f320..71318a0184a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/S3PackageImporter.java
@@ -17,9 +17,6 @@
 import com.amazonaws.services.s3.model.S3Object;
 import com.amazonaws.services.s3.model.S3ObjectSummary;
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
-import edu.harvard.iq.dataverse.batch.jobs.importer.filesystem.FileRecordWriter;
-import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
-import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.io.BufferedReader;
@@ -31,9 +28,9 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 
 /**
  * This class is for importing files added to s3 outside of dataverse.
@@ -209,7 +206,7 @@ public DataFile createPackageDataFile(Dataset dataset, String folderName, long t
 
         GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(packageFile.getProtocol(), commandEngine.getContext());
         if (packageFile.getIdentifier() == null || packageFile.getIdentifier().isEmpty()) {
-            String packageIdentifier = dataFileServiceBean.generateDataFileIdentifier(packageFile, idServiceBean);
+            String packageIdentifier = idServiceBean.generateDataFileIdentifier(packageFile);
             packageFile.setIdentifier(packageIdentifier);
         }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
index 363972b48c3..6be768321c4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
@@ -3,23 +3,24 @@
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.feedback.Feedback;
 import edu.harvard.iq.dataverse.feedback.FeedbackUtil;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.MailUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import java.util.List;
+import java.util.Optional;
 import java.util.Random;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.mail.internet.InternetAddress;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.mail.internet.InternetAddress;
 import org.apache.commons.validator.routines.EmailValidator;
 
 @ViewScoped
@@ -62,7 +63,7 @@ public class SendFeedbackDialog implements java.io.Serializable {
      * Either the dataverse or the dataset that the message is pertaining to. If
      * there is no recipient, this is a general feedback message.
      */
-    private DvObject recipient;
+    private DvObject feedbackTarget;
 
     /**
      * :SystemEmail (the main support address for an installation).
@@ -97,11 +98,11 @@ public void initUserInput(ActionEvent ae) {
         userMessage = "";
         messageSubject = "";
         Random random = new Random();
-        op1 = new Long(random.nextInt(10));
-        op2 = new Long(random.nextInt(10));
+        op1 = Long.valueOf(random.nextInt(10));
+        op2 = Long.valueOf(random.nextInt(10));
         userSum = null;
-        String systemEmail = settingsService.getValueForKey(SettingsServiceBean.Key.SystemEmail);
-        systemAddress = MailUtil.parseSystemAddress(systemEmail);
+        String supportEmail = JvmSettings.SUPPORT_EMAIL.lookupOptional().orElse(settingsService.getValueForKey(SettingsServiceBean.Key.SystemEmail));
+        systemAddress = MailUtil.parseSystemAddress(supportEmail);
     }
 
     public Long getOp1() {
@@ -129,19 +130,27 @@ public void setUserSum(Long userSum) {
     }
 
     public String getMessageTo() {
-        if (recipient == null) {
+        if (feedbackTarget == null) {
             return BrandingUtil.getSupportTeamName(systemAddress);
-        } else if (recipient.isInstanceofDataverse()) {
-            return ((Dataverse) recipient).getDisplayName() + " " + BundleUtil.getStringFromBundle("contact.contact");
+        } else if (feedbackTarget.isInstanceofDataverse()) {
+            return ((Dataverse) feedbackTarget).getDisplayName() + " " + BundleUtil.getStringFromBundle("contact.contact");
         } else {
             return BundleUtil.getStringFromBundle("dataset") + " " + BundleUtil.getStringFromBundle("contact.contact");
         }
     }
+    
+    public String getMessageCC() {
+        if (ccSupport()) {
+            return BrandingUtil.getSupportTeamName(systemAddress);
+        }
+        return null;
+    }
+
 
     public String getFormHeader() {
-        if (recipient == null) {
+        if (feedbackTarget == null) {
             return BrandingUtil.getContactHeader(systemAddress);
-        } else if (recipient.isInstanceofDataverse()) {
+        } else if (feedbackTarget.isInstanceofDataverse()) {
             return BundleUtil.getStringFromBundle("contact.dataverse.header");
         } else {
             return BundleUtil.getStringFromBundle("contact.dataset.header");
@@ -173,11 +182,11 @@ public String loggedInUserEmail() {
     }
 
     public DvObject getRecipient() {
-        return recipient;
+        return feedbackTarget;
     }
 
     public void setRecipient(DvObject recipient) {
-        this.recipient = recipient;
+        this.feedbackTarget = recipient;
     }
 
     public void validateUserSum(FacesContext context, UIComponent component, Object value) throws ValidatorException {
@@ -200,16 +209,26 @@ public void validateUserEmail(FacesContext context, UIComponent component, Objec
     public String sendMessage() {
         String installationBrandName = BrandingUtil.getInstallationBrandName();
         String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress);
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(recipient, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, systemConfig.getDataverseSiteUrl(), installationBrandName, supportTeamName);
-        if (feedbacks.isEmpty()) {
+
+        Feedback feedback = FeedbackUtil.gatherFeedback(feedbackTarget, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, systemConfig.getDataverseSiteUrl(), installationBrandName, supportTeamName, ccSupport());
+        if (feedback==null) {
             logger.warning("No feedback has been sent!");
             return null;
         }
-        for (Feedback feedback : feedbacks) {
             logger.fine("sending feedback: " + feedback);
-            mailService.sendMail(feedback.getFromEmail(), feedback.getToEmail(), feedback.getSubject(), feedback.getBody());
-        }
+            mailService.sendMail(feedback.getFromEmail(), feedback.getToEmail(), feedback.getCcEmail(), feedback.getSubject(), feedback.getBody());
         return null;
     }
+    
+    public boolean ccSupport() {
+        return ccSupport(feedbackTarget);
+    }
+    
+    public static boolean ccSupport(DvObject feedbackTarget) {
+        // CC support only when the setting is enabled and the message isn't already addressed directly to support (which is the case when feedbackTarget is null)
+        Optional<Boolean> ccSupport = JvmSettings.CC_SUPPORT_ON_CONTACT_EMAIL.lookupOptional(Boolean.class);
+        
+        return feedbackTarget != null && ccSupport.isPresent() && ccSupport.get();
+    }
 
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
index bf36f265743..8ab1e87aef2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java
@@ -6,6 +6,10 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
+import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.Setting;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
@@ -13,7 +17,6 @@
 import edu.harvard.iq.dataverse.util.MailUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.UserNotification.Type;
 
 import java.time.LocalDate;
@@ -24,19 +27,20 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.logging.Logger;
 import java.util.Set;
 
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
-import javax.json.JsonObject;
-import javax.mail.internet.InternetAddress;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
+import jakarta.mail.internet.InternetAddress;
 
 /**
  *
@@ -59,6 +63,9 @@ public class SettingsWrapper implements java.io.Serializable {
     
     @EJB
     DatasetFieldServiceBean fieldService;
+    
+    @EJB
+    MetadataBlockServiceBean mdbService;
 
     private Map<String, String> settingsMap;
     
@@ -84,6 +91,7 @@ public class SettingsWrapper implements java.io.Serializable {
     
     //External Vocabulary support
     private Map<Long, JsonObject> cachedCvocMap = null;
+    private Map<Long, JsonObject> cachedCvocByTermFieldMap = null;
     
     private Long zipDownloadLimit = null; 
     
@@ -115,6 +123,8 @@ public class SettingsWrapper implements java.io.Serializable {
     
     private Boolean customLicenseAllowed = null;
     
+    private List<MetadataBlock> systemMetadataBlocks;
+    
     private Set<Type> alwaysMuted = null;
 
     private Set<Type> neverMuted = null;
@@ -326,12 +336,29 @@ public boolean isGlobusFileDownload() {
     }
     
     public boolean isGlobusEnabledStorageDriver(String driverId) {
-        if (globusStoreList == null) {
-            globusStoreList = systemConfig.getGlobusStoresList();
+        return (GlobusAccessibleStore.acceptsGlobusTransfers(driverId) || GlobusAccessibleStore.allowsGlobusReferences(driverId));
+    }
+    
+    public boolean isDownloadable(FileMetadata fmd) {
+        boolean downloadable = true;
+        if (isGlobusFileDownload()) {
+            String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier());
+            
+            downloadable = downloadable && !AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(driverId); 
         }
-        return globusStoreList.contains(driverId);
+        return downloadable;
     }
     
+    public boolean isGlobusTransferable(FileMetadata fmd) {
+        boolean globusTransferable = true;
+        if (isGlobusFileDownload()) {
+            String driverId = DataAccess.getStorageDriverFromIdentifier(fmd.getDataFile().getStorageIdentifier());
+            globusTransferable = GlobusAccessibleStore.isGlobusAccessible(driverId);
+        }
+        return globusTransferable;
+    }
+    
+    
     public String getGlobusAppUrl() {
         if (globusAppUrl == null) {
             globusAppUrl = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost");
@@ -464,7 +491,16 @@ public boolean isDataCiteInstallation() {
     public boolean isMakeDataCountDisplayEnabled() {
         boolean safeDefaultIfKeyNotFound = (getValueForKey(SettingsServiceBean.Key.MDCLogPath)!=null); //Backward compatible
         return isTrueForKey(SettingsServiceBean.Key.DisplayMDCMetrics, safeDefaultIfKeyNotFound);
+    }
     
+    public LocalDate getMDCStartDate() {
+        String date = getValueForKey(SettingsServiceBean.Key.MDCStartDate);
+        LocalDate ld = null;
+        if (date != null) {
+            ld = LocalDate.parse(date);
+        }
+        return ld;
+        
     }
     
     public boolean displayChronologicalDateFacets() {
@@ -579,19 +615,24 @@ public Map<String, String> getBaseMetadataLanguageMap(boolean refresh) {
     public Map<String, String> getMetadataLanguages(DvObjectContainer target) {
         Map<String,String> currentMap = new HashMap<String,String>();
         currentMap.putAll(getBaseMetadataLanguageMap(false));
-        currentMap.put(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE, getDefaultMetadataLanguageLabel(target));
+        currentMap.put(DvObjectContainer.UNDEFINED_CODE, getDefaultMetadataLanguageLabel(target));
         return currentMap;
     }
-    
+
     private String getDefaultMetadataLanguageLabel(DvObjectContainer target) {
         String mlLabel = BundleUtil.getStringFromBundle("dataverse.metadatalanguage.setatdatasetcreation");
-        String mlCode = target.getEffectiveMetadataLanguage();
-        // If it's 'undefined', it's the global default
-        if (!mlCode.equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)) {
-            // Get the label for the language code found
-            mlLabel = getBaseMetadataLanguageMap(false).get(mlCode);
-            mlLabel = mlLabel + " " + BundleUtil.getStringFromBundle("dataverse.inherited");
+
+        if (target.getOwner() != null) { // The root collection is excluded from the inherited metadata language lookup
+            String mlCode = target.getOwner().getEffectiveMetadataLanguage();
+
+            // If it's undefined, no parent has a metadata language defined, and the global default should be used.
+            if (!mlCode.equals(DvObjectContainer.UNDEFINED_CODE)) {
+                // Get the label for the language code found
+                mlLabel = getBaseMetadataLanguageMap(false).get(mlCode);
+                mlLabel = mlLabel + " " + BundleUtil.getStringFromBundle("dataverse.inherited");
+            }
         }
+
         return mlLabel;
     }
     
@@ -603,7 +644,7 @@ public String getDefaultMetadataLanguage() {
             return (String) mdMap.keySet().toArray()[0];
             } else {
                 //More than one - :MetadataLanguages is set and the default is undefined (users must choose if the collection doesn't override the default)
-                return DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE;
+                return DvObjectContainer.UNDEFINED_CODE;
             }
         } else {
             // None - :MetadataLanguages is not set so return null to turn off the display (backward compatibility)
@@ -611,6 +652,32 @@ public String getDefaultMetadataLanguage() {
         }
     }
     
+    public Map<String, String> getGuestbookEntryOptions(DvObjectContainer target) {
+        Map<String, String> currentMap = new HashMap<String, String>();
+        String atDownload = BundleUtil.getStringFromBundle("dataverse.guestbookentry.atdownload");
+        String atRequest = BundleUtil.getStringFromBundle("dataverse.guestbookentry.atrequest");
+        Optional<Boolean> gbDefault = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
+        if (gbDefault.isPresent()) {
+            // Three options - inherited/default option, at Download, at Request
+            String useDefault = null;
+            if (target.getOwner() == null) {
+                boolean defaultOption = gbDefault.get();
+                useDefault = (defaultOption ? atRequest : atDownload)
+                        + BundleUtil.getStringFromBundle("dataverse.default");
+            } else {
+                boolean defaultOption = target.getOwner().getEffectiveGuestbookEntryAtRequest();
+                useDefault = (defaultOption ? atRequest : atDownload)
+                        + BundleUtil.getStringFromBundle("dataverse.inherited");
+            }
+            currentMap.put(DvObjectContainer.UNDEFINED_CODE, useDefault);
+            currentMap.put(Boolean.toString(true), atRequest);
+            currentMap.put(Boolean.toString(false), atDownload);
+        } else {
+            // Setting not defined - leave empty
+        }
+        return currentMap;
+    }
+    
     public Dataverse getRootDataverse() {
         if (rootDataverse == null) {
             rootDataverse = dataverseService.findRootDataverse();
@@ -656,12 +723,19 @@ public String getFooterCopyrightAndYear() {
         return footerCopyrightAndYear; 
     }
     
-    public Map<Long, JsonObject> getCVocConf() {
-        //Cache this in the view
-        if(cachedCvocMap==null) {
-        cachedCvocMap = fieldService.getCVocConf(false);
+    public Map<Long, JsonObject> getCVocConf(boolean byTermField) {
+        if (byTermField) {
+            if (cachedCvocByTermFieldMap == null) {
+                cachedCvocByTermFieldMap = fieldService.getCVocConf(true);
+            }
+            return cachedCvocByTermFieldMap;
+        } else {
+            // Cache this in the view
+            if (cachedCvocMap == null) {
+                cachedCvocMap = fieldService.getCVocConf(false);
+            }
+            return cachedCvocMap;
         }
-        return cachedCvocMap;
     }
     
     public String getMetricsUrl() {
@@ -700,4 +774,20 @@ public boolean isCustomLicenseAllowed() {
         }
         return customLicenseAllowed;
     }
+
+    public List<MetadataBlock> getSystemMetadataBlocks() {
+
+        if (systemMetadataBlocks == null) {
+            systemMetadataBlocks = new ArrayList<MetadataBlock>();
+        }
+        List<MetadataBlock> blocks = mdbService.listMetadataBlocks();
+        for (MetadataBlock mdb : blocks) {
+            String smdbString = JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName()).orElse(null);
+            if (smdbString != null) {
+                systemMetadataBlocks.add(mdb);
+            }
+        }
+
+        return systemMetadataBlocks;
+    }
 }
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/Shib.java b/src/main/java/edu/harvard/iq/dataverse/Shib.java
index 0f0e20aba94..24c0f9d7926 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Shib.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Shib.java
@@ -24,16 +24,17 @@
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
+import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.ExternalContext;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.ExternalContext;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 @ViewScoped
 @Named("Shib")
@@ -62,7 +63,7 @@ public class Shib implements java.io.Serializable {
     HttpServletRequest request;
 
     private String userPersistentId;
-    private String internalUserIdentifer;
+    private String internalUserIdentifier;
     AuthenticatedUserDisplayInfo displayInfo;
     /**
      * @todo Remove this boolean some day? Now the mockups show a popup. Should
@@ -210,8 +211,8 @@ public void init() {
         }
 
         String usernameAssertion = getValueFromAssertion(ShibUtil.usernameAttribute);
-        internalUserIdentifer = ShibUtil.generateFriendlyLookingUserIdentifer(usernameAssertion, emailAddress);
-        logger.fine("friendly looking identifer (backend will enforce uniqueness):" + internalUserIdentifer);
+        internalUserIdentifier = ShibUtil.generateFriendlyLookingUserIdentifier(usernameAssertion, emailAddress);
+        logger.log(Level.FINE, "friendly looking identifier (backend will enforce uniqueness): {0}", internalUserIdentifier);
 
         String shibAffiliationAttribute = settingsService.getValueForKey(SettingsServiceBean.Key.ShibAffiliationAttribute);
         String affiliation = (StringUtils.isNotBlank(shibAffiliationAttribute))
@@ -326,7 +327,7 @@ public String confirmAndCreateAccount() {
         AuthenticatedUser au = null;
         try {
             au = authSvc.createAuthenticatedUser(
-                    new UserRecordIdentifier(shibAuthProvider.getId(), lookupStringPerAuthProvider), internalUserIdentifer, displayInfo, true);
+                    new UserRecordIdentifier(shibAuthProvider.getId(), lookupStringPerAuthProvider), internalUserIdentifier, displayInfo, true);
         } catch (EJBException ex) {
             /**
              * @todo Show the ConstraintViolationException, if any.
@@ -354,7 +355,7 @@ public String confirmAndConvertAccount() {
         visibleTermsOfUse = false;
         ShibAuthenticationProvider shibAuthProvider = new ShibAuthenticationProvider();
         String lookupStringPerAuthProvider = userPersistentId;
-        UserIdentifier userIdentifier = new UserIdentifier(lookupStringPerAuthProvider, internalUserIdentifer);
+        UserIdentifier userIdentifier = new UserIdentifier(lookupStringPerAuthProvider, internalUserIdentifier);
         logger.fine("builtin username: " + builtinUsername);
         AuthenticatedUser builtInUserToConvert = authSvc.canLogInAsBuiltinUser(builtinUsername, builtinPassword);
         if (builtInUserToConvert != null) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java b/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java
index cd6d53fd8a8..adf2e7d3010 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SuperUserPage.java
@@ -6,11 +6,11 @@
 import java.util.concurrent.CancellationException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.JsonObjectBuilder;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.JsonObjectBuilder;
 
 @SessionScoped
 @Named("SuperUserPage")
diff --git a/src/main/java/edu/harvard/iq/dataverse/Template.java b/src/main/java/edu/harvard/iq/dataverse/Template.java
index 7798367b4d9..05c6df51197 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Template.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Template.java
@@ -12,31 +12,31 @@
 import java.util.TreeMap;
 import java.util.stream.Collectors;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.persistence.Transient;
-import javax.validation.constraints.Size;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.Size;
 
 import edu.harvard.iq.dataverse.util.DateUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
@@ -60,14 +60,13 @@ public class Template implements Serializable {
     private Long id;
 
     public Template() {
-
     }
 
     //Constructor for create
-    public Template(Dataverse dataverseIn) {
+    public Template(Dataverse dataverseIn, List<MetadataBlock> systemMDBlocks) {
         dataverse = dataverseIn;
         datasetFields = initDatasetFields();
-        initMetadataBlocksForCreate();
+        initMetadataBlocksForCreate(systemMDBlocks);
     }
 
     public Long getId() {
@@ -247,24 +246,26 @@ public int compare(DatasetField d1, DatasetField d2) {
         return dsfList;
     }
 
-    private void initMetadataBlocksForCreate() {
+    private void initMetadataBlocksForCreate(List<MetadataBlock> systemMDBlocks) {
         metadataBlocksForEdit.clear();
         for (MetadataBlock mdb : this.getDataverse().getMetadataBlocks()) {
-            List<DatasetField> datasetFieldsForEdit = new ArrayList<>();
-            for (DatasetField dsf : this.getDatasetFields()) {
+            if (!systemMDBlocks.contains(mdb)) {
+                List<DatasetField> datasetFieldsForEdit = new ArrayList<>();
+                for (DatasetField dsf : this.getDatasetFields()) {
 
-                if (dsf.getDatasetFieldType().getMetadataBlock().equals(mdb)) {
-                    datasetFieldsForEdit.add(dsf);
+                    if (dsf.getDatasetFieldType().getMetadataBlock().equals(mdb)) {
+                        datasetFieldsForEdit.add(dsf);
+                    }
                 }
-            }
 
-            if (!datasetFieldsForEdit.isEmpty()) {
-                metadataBlocksForEdit.put(mdb, sortDatasetFields(datasetFieldsForEdit));
+                if (!datasetFieldsForEdit.isEmpty()) {
+                    metadataBlocksForEdit.put(mdb, sortDatasetFields(datasetFieldsForEdit));
+                }
             }
         }
     }
 
-    public void setMetadataValueBlocks() {
+    public void setMetadataValueBlocks(List<MetadataBlock> systemMDBlocks) {
         //TODO: A lot of clean up on the logic of this method
         metadataBlocksForView.clear();
         metadataBlocksForEdit.clear();
@@ -306,14 +307,16 @@ public void setMetadataValueBlocks() {
         }
         
         for (MetadataBlock mdb : editMDB) {
-            List<DatasetField> datasetFieldsForEdit = new ArrayList<>();
-            this.setDatasetFields(initDatasetFields());
-            for (DatasetField dsf : this.getDatasetFields() ) {
-                if (dsf.getDatasetFieldType().getMetadataBlock().equals(mdb)) { 
-                    datasetFieldsForEdit.add(dsf);
+            if (!systemMDBlocks.contains(mdb)) {
+                List<DatasetField> datasetFieldsForEdit = new ArrayList<>();
+                this.setDatasetFields(initDatasetFields());
+                for (DatasetField dsf : this.getDatasetFields()) {
+                    if (dsf.getDatasetFieldType().getMetadataBlock().equals(mdb)) {
+                        datasetFieldsForEdit.add(dsf);
+                    }
                 }
+                metadataBlocksForEdit.put(mdb, sortDatasetFields(datasetFieldsForEdit));
             }
-            metadataBlocksForEdit.put(mdb, sortDatasetFields(datasetFieldsForEdit));
         }
         
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java b/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java
index 98b24f84801..1d855e029ce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TemplateConverter.java
@@ -6,12 +6,12 @@
 
 package edu.harvard.iq.dataverse;
 
-import javax.ejb.EJB;
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
index 6da0d99da20..44070dcbb41 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TemplatePage.java
@@ -15,12 +15,12 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.faces.application.FacesMessage;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
@@ -54,6 +54,9 @@ public class TemplatePage implements java.io.Serializable {
     @Inject
     LicenseServiceBean licenseServiceBean;
     
+    @Inject
+    SettingsWrapper settingsWrapper;
+    
     private static final Logger logger = Logger.getLogger(TemplatePage.class.getCanonicalName());
 
     public enum EditMode {
@@ -130,7 +133,7 @@ public String init() {
 
             template = templateService.find(templateId);
             template.setDataverse(dataverse);
-            template.setMetadataValueBlocks();
+            template.setMetadataValueBlocks(settingsWrapper.getSystemMetadataBlocks());
 
             if (template.getTermsOfUseAndAccess() != null) {
                 TermsOfUseAndAccess terms = template.getTermsOfUseAndAccess().copyTermsOfUseAndAccess();
@@ -143,8 +146,9 @@ public String init() {
             // create mode for a new template
 
             editMode = TemplatePage.EditMode.CREATE;
-            template = new Template(this.dataverse);
+            template = new Template(this.dataverse, settingsWrapper.getSystemMetadataBlocks());
             TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
+            terms.setFileAccessRequest(true);
             terms.setTemplate(template);
             terms.setLicense(licenseServiceBean.getDefault());
             template.setTermsOfUseAndAccess(terms);
diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java
index f2ac8779d2d..46382fc2588 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java
@@ -2,14 +2,13 @@
 
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
index a8616283332..ee865770dbe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java
@@ -6,15 +6,15 @@
 package edu.harvard.iq.dataverse;
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToOne;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Transient;
 
 import edu.harvard.iq.dataverse.license.License;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
index 2cf78db0f03..ca38a305d63 100644
--- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccessValidator.java
@@ -6,8 +6,8 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java
index e270d3842f6..f30051e26ae 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java
@@ -7,6 +7,7 @@
 
 import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseThemeCommand;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import java.io.File;
@@ -14,20 +15,21 @@
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PreDestroy;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.html.HtmlInputText;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.ValidatorException;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PreDestroy;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.html.HtmlInputText;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.ValidatorException;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.primefaces.PrimeFaces;
@@ -49,6 +51,8 @@ public class ThemeWidgetFragment implements java.io.Serializable {
     static final String DEFAULT_TEXT_COLOR = "888888";
     private static final Logger logger = Logger.getLogger(ThemeWidgetFragment.class.getCanonicalName());   
 
+    public static final String LOGOS_SUBDIR = "logos";
+    public static final String LOGOS_TEMP_SUBDIR = LOGOS_SUBDIR + File.separator + "temp";
 
     private File tempDir;
     private File uploadedFile;
@@ -86,12 +90,18 @@ public void setTaglineInput(HtmlInputText taglineInput) {
     }
 
  
-   
+    public static Path getLogoDir(String ownerId) {
+        return Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), LOGOS_SUBDIR, ownerId);
+    }
     
-    private  void createTempDir() {
+    private void createTempDir() {
           try {
-            File tempRoot = Files.createDirectories(Paths.get("../docroot/logos/temp")).toFile();
-            tempDir = Files.createTempDirectory(tempRoot.toPath(),editDv.getId().toString()).toFile();
+            // Create the temporary space if it does not yet exist (a preexisting directory is silently ignored)
+            // Note that the docroot directory is checked within ConfigCheckService for presence and write access.
+            Path tempRoot = Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), LOGOS_TEMP_SUBDIR);
+            Files.createDirectories(tempRoot);
+            
+            this.tempDir = Files.createTempDirectory(tempRoot, editDv.getId().toString()).toFile();
         } catch (IOException e) {
             throw new RuntimeException("Error creating temp directory", e); // improve error handling
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
index 6c8db8c124b..ae81a9326c4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ThumbnailServiceWrapper.java
@@ -5,28 +5,19 @@
  */
 package edu.harvard.iq.dataverse;
 
-import edu.harvard.iq.dataverse.dataaccess.DataAccess;
-import edu.harvard.iq.dataverse.dataaccess.StorageIO;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
-import edu.harvard.iq.dataverse.dataset.DatasetUtil;
-import static edu.harvard.iq.dataverse.dataset.DatasetUtil.datasetLogoThumbnail;
+
 import edu.harvard.iq.dataverse.search.SolrSearchResult;
-import edu.harvard.iq.dataverse.util.FileUtil;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Base64;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+
 import java.util.HashMap;
 import java.util.Map;
-import javax.ejb.EJB;
-import javax.enterprise.context.RequestScoped;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import org.apache.commons.io.IOUtils;
+import java.util.logging.Logger;
+
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
@@ -36,6 +27,9 @@
 @RequestScoped
 @Named
 public class ThumbnailServiceWrapper implements java.io.Serializable  {
+    
+    private static final Logger logger = Logger.getLogger(ThumbnailServiceWrapper.class.getCanonicalName());
+    
     @Inject
     PermissionsWrapper permissionsWrapper;
     @EJB
@@ -49,49 +43,7 @@ public class ThumbnailServiceWrapper implements java.io.Serializable  {
     
     private Map<Long, String> dvobjectThumbnailsMap = new HashMap<>();
     private Map<Long, DvObject> dvobjectViewMap = new HashMap<>();
-
-    private String getAssignedDatasetImage(Dataset dataset, int size) {
-        if (dataset == null) {
-            return null;
-        }
-
-        DataFile assignedThumbnailFile = dataset.getThumbnailFile();
-
-        if (assignedThumbnailFile != null) {
-            Long assignedThumbnailFileId = assignedThumbnailFile.getId();
-
-            if (this.dvobjectThumbnailsMap.containsKey(assignedThumbnailFileId)) {
-                // Yes, return previous answer
-                //logger.info("using cached result for ... "+assignedThumbnailFileId);
-                if (!"".equals(this.dvobjectThumbnailsMap.get(assignedThumbnailFileId))) {
-                    return this.dvobjectThumbnailsMap.get(assignedThumbnailFileId);
-                }
-                return null;
-            }
-
-            String imageSourceBase64 = ImageThumbConverter.getImageThumbnailAsBase64(assignedThumbnailFile,
-                    size);
-                    //ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
-
-            if (imageSourceBase64 != null) {
-                this.dvobjectThumbnailsMap.put(assignedThumbnailFileId, imageSourceBase64);
-                return imageSourceBase64;
-            }
-
-            // OK - we can't use this "assigned" image, because of permissions, or because 
-            // the thumbnail failed to generate, etc... in this case we'll 
-            // mark this dataset in the lookup map - so that we don't have to
-            // do all these lookups again...
-            this.dvobjectThumbnailsMap.put(assignedThumbnailFileId, "");
-            
-            // TODO: (?)
-            // do we need to cache this datafile object in the view map?
-            // -- L.A., 4.2.2
-        }
-
-        return null;
-
-    }
+    private Map<Long, Boolean> hasThumbMap = new HashMap<>();
 
     // it's the responsibility of the user - to make sure the search result
     // passed to this method is of the Datafile type!
@@ -133,7 +85,7 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) {
 
             if ((!((DataFile)result.getEntity()).isRestricted()
                         || permissionsWrapper.hasDownloadFilePermission(result.getEntity()))
-                    && dataFileService.isThumbnailAvailable((DataFile) result.getEntity())) {
+                    && isThumbnailAvailable((DataFile) result.getEntity())) {
                 
                 cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64(
                         (DataFile) result.getEntity(),
@@ -159,9 +111,16 @@ public String getFileCardImageAsBase64Url(SolrSearchResult result) {
         return null;
     }
 
+    public boolean isThumbnailAvailable(DataFile entity) {
+        if(!hasThumbMap.containsKey(entity.getId())) {
+            hasThumbMap.put(entity.getId(), dataFileService.isThumbnailAvailable(entity));
+        }
+        return hasThumbMap.get(entity.getId());
+    }
+
     // it's the responsibility of the user - to make sure the search result
     // passed to this method is of the Dataset type!
-    public String getDatasetCardImageAsBase64Url(SolrSearchResult result) {
+    public String getDatasetCardImageAsUrl(SolrSearchResult result) {
         // Before we do anything else, check if it's a harvested dataset; 
         // no need to check anything else if so (harvested datasets never have 
         // thumbnails)
@@ -179,13 +138,14 @@ public String getDatasetCardImageAsBase64Url(SolrSearchResult result) {
             return null;
         }
         Dataset dataset = (Dataset)result.getEntity();
+        dataset.setId(result.getEntityId());
         
         Long versionId = result.getDatasetVersionId();
 
-        return getDatasetCardImageAsBase64Url(dataset, versionId, result.isPublishedState(), ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
+        return getDatasetCardImageAsUrl(dataset, versionId, result.isPublishedState(), ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
     }
     
-    public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, boolean autoselect, int size) {
+    public String getDatasetCardImageAsUrl(Dataset dataset, Long versionId, boolean autoselect, int size) {
         Long datasetId = dataset.getId();
         if (datasetId != null) {
             if (this.dvobjectThumbnailsMap.containsKey(datasetId)) {
@@ -206,112 +166,21 @@ public String getDatasetCardImageAsBase64Url(Dataset dataset, Long versionId, bo
             this.dvobjectThumbnailsMap.put(datasetId, "");
             return null; 
         }
-        
-        String cardImageUrl = null;
-        StorageIO<Dataset> dataAccess = null;
-                
-        try{
-            dataAccess = DataAccess.getStorageIO(dataset);
-        }
-        catch(IOException ioex){
-          // ignore
-        }
-        
-        InputStream in = null;
-        // See if the dataset already has a dedicated thumbnail ("logo") saved as
-        // an auxilary file on the dataset level: 
-        // (don't bother checking if it exists; just try to open the input stream)
-        try {
-                in = dataAccess.getAuxFileAsInputStream(datasetLogoThumbnail + ".thumb" + size);
-                        //thumb48addedByImageThumbConverter);
-        } catch (Exception ioex) {
-              //ignore
-        }
-        
-        if (in != null) {
-            try {
-                byte[] bytes = IOUtils.toByteArray(in);
-                String base64image = Base64.getEncoder().encodeToString(bytes);
-                cardImageUrl = FileUtil.DATA_URI_SCHEME + base64image;
-                this.dvobjectThumbnailsMap.put(datasetId, cardImageUrl);
-                return cardImageUrl;
-            } catch (IOException ex) {
-                this.dvobjectThumbnailsMap.put(datasetId, "");
-                return null; 
-                // (alternatively, we could ignore the exception, and proceed with the 
-                // regular process of selecting the thumbnail from the available 
-                // image files - ?)
-            } finally
-	    {
-		    IOUtils.closeQuietly(in);
-	    }
-        } 
+        DataFile thumbnailFile = dataset.getThumbnailFile();
 
-        // If not, see if the dataset has one of its image files already assigned
-        // to be the designated thumbnail:
-        cardImageUrl = this.getAssignedDatasetImage(dataset, size);
+        if (thumbnailFile == null) {
 
-        if (cardImageUrl != null) {
-            //logger.info("dataset id " + result.getEntity().getId() + " has a dedicated image assigned; returning " + cardImageUrl);
-            return cardImageUrl;
-        }
-        
-        // And finally, try to auto-select the thumbnail (unless instructed not to):
-        
-        if (!autoselect) {
-            return null;
-        }
-
-        // We attempt to auto-select via the optimized, native query-based method 
-        // from the DatasetVersionService:
-        Long thumbnailImageFileId = datasetVersionService.getThumbnailByVersionId(versionId);
-
-        if (thumbnailImageFileId != null) {
-            //cardImageUrl = FILE_CARD_IMAGE_URL + thumbnailImageFileId;
-            if (this.dvobjectThumbnailsMap.containsKey(thumbnailImageFileId)) {
-                // Yes, return previous answer
-                //logger.info("using cached result for ... "+datasetId);
-                if (!"".equals(this.dvobjectThumbnailsMap.get(thumbnailImageFileId))) {
-                    return this.dvobjectThumbnailsMap.get(thumbnailImageFileId);
-                }
+            // We attempt to auto-select via the optimized, native query-based method
+            // from the DatasetVersionService:
+            if (datasetVersionService.getThumbnailByVersionId(versionId) == null) {
                 return null;
             }
-
-            DataFile thumbnailImageFile = null;
-
-            if (dvobjectViewMap.containsKey(thumbnailImageFileId)
-                    && dvobjectViewMap.get(thumbnailImageFileId).isInstanceofDataFile()) {
-                thumbnailImageFile = (DataFile) dvobjectViewMap.get(thumbnailImageFileId);
-            } else {
-                thumbnailImageFile = dataFileService.findCheapAndEasy(thumbnailImageFileId);
-                if (thumbnailImageFile != null) {
-                    // TODO:
-                    // do we need this file on the map? - it may not even produce
-                    // a thumbnail!
-                    dvobjectViewMap.put(thumbnailImageFileId, thumbnailImageFile);
-                } else {
-                    this.dvobjectThumbnailsMap.put(thumbnailImageFileId, "");
-                    return null;
-                }
-            }
-
-            if (dataFileService.isThumbnailAvailable(thumbnailImageFile)) {
-                cardImageUrl = ImageThumbConverter.getImageThumbnailAsBase64(
-                        thumbnailImageFile,
-                        size);
-                        //ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
-            }
-
-            if (cardImageUrl != null) {
-                this.dvobjectThumbnailsMap.put(thumbnailImageFileId, cardImageUrl);
-            } else {
-                this.dvobjectThumbnailsMap.put(thumbnailImageFileId, "");
-            }
         }
 
-        //logger.info("dataset id " + result.getEntityId() + ", returning " + cardImageUrl);
-
-        return cardImageUrl;
+        String url = SystemConfig.getDataverseSiteUrlStatic() + "/api/datasets/" + dataset.getId() + "/logo";
+        logger.fine("getDatasetCardImageAsUrl: " + url);
+        this.dvobjectThumbnailsMap.put(datasetId,url);
+        return url;
     }
     
     // it's the responsibility of the user - to make sure the search result
@@ -323,6 +192,7 @@ public String getDataverseCardImageAsBase64Url(SolrSearchResult result) {
     public void resetObjectMaps() {
         dvobjectThumbnailsMap = new HashMap<>();
         dvobjectViewMap = new HashMap<>();
+        hasThumbMap = new HashMap<>();
     }
 
     
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java b/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java
index 7bd4f2d898f..888669ee615 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserBannerMessage.java
@@ -4,15 +4,15 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java
index b68a1b9d13e..280c2075494 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserNotification.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserNotification.java
@@ -12,17 +12,17 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.stream.Collectors;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
 
 /**
  *
@@ -39,7 +39,7 @@ public enum Type {
         CHECKSUMIMPORT, CHECKSUMFAIL, CONFIRMEMAIL, APIGENERATED, INGESTCOMPLETED, INGESTCOMPLETEDWITHERRORS, 
         PUBLISHFAILED_PIDREG, WORKFLOW_SUCCESS, WORKFLOW_FAILURE, STATUSUPDATED, DATASETCREATED, DATASETMENTIONED,
         GLOBUSUPLOADCOMPLETED, GLOBUSUPLOADCOMPLETEDWITHERRORS,
-        GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS;
+        GLOBUSDOWNLOADCOMPLETED, GLOBUSDOWNLOADCOMPLETEDWITHERRORS, REQUESTEDFILEACCESS;
         
         public String getDescription() {
             return BundleUtil.getStringFromBundle("notification.typeDescription." + this.name());
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
index 947ee3ce989..228e4b19c38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserNotificationServiceBean.java
@@ -14,16 +14,16 @@
 import java.sql.Timestamp;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -131,6 +131,7 @@ public void sendNotification(AuthenticatedUser dataverseUser, Timestamp sendDate
             save(userNotification);
         }
     }
+    
 
     public boolean isEmailMuted(UserNotification userNotification) {
         final Type type = userNotification.getType();
diff --git a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
index 2d8ecf64f76..93892376edc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/UserServiceBean.java
@@ -12,16 +12,15 @@
 import java.util.Date;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 import org.apache.commons.lang3.StringUtils;
-import org.ocpsoft.common.util.Strings;
 
 @Stateless
 @Named
@@ -190,7 +189,7 @@ private HashMap<String, List<String>> retrieveRolesForUsers(List<Object[]> userO
         // Add '@' to each identifier and delimit the list by ","
         // -------------------------------------------------
         String identifierListString = userIdentifierList.stream()
-                                     .filter(x -> !Strings.isNullOrEmpty(x))
+                                     .filter(x -> x != null && !x.isEmpty())
                                      .map(x -> "'@" + x + "'")
                                      .collect(Collectors.joining(", "));
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java b/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java
index 94e33d6220a..9e8ce42491d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateDataFileDirectoryName.java
@@ -10,8 +10,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java b/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java
index ae7b4a1eaef..f36a1d9541e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateDatasetFieldType.java
@@ -13,8 +13,8 @@
 import java.lang.annotation.Retention;
 import java.lang.annotation.Target;
 
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 @Target({TYPE, ANNOTATION_TYPE})
 @Retention(RUNTIME)
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java
index 8717d10fc8d..f55e93af674 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateTermsOfUseAndAccess.java
@@ -10,8 +10,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java b/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
index 405a7feb52f..c8d64d4a642 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ValidateVersionNote.java
@@ -11,8 +11,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java
index 6f6d0dfeee1..a8ea5fabde4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/WidgetWrapper.java
@@ -5,10 +5,10 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletResponse;
 
 /**
  *
@@ -62,7 +62,7 @@ public boolean isWidgetTarget(DvObject dvo) {
                         case "dataverse": 
                             break; // keep looping
                         case "dataset":
-                            if (((Dataset) dvo).getGlobalIdString().equals(widgetHome)) {
+                            if (((Dataset) dvo).getGlobalId().asString().equals(widgetHome)) {
                                 return true;
                             }   break;
                         default:
diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
index 31a9ad25e5b..6743c3f2143 100644
--- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
+++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecord.java
@@ -3,16 +3,16 @@
 import java.util.Date;
 import java.util.Objects;
 import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.PrePersist;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  * Logs a single action in the action log.
diff --git a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
index ba19fdd9eeb..2d16f52bb09 100644
--- a/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/actionlogging/ActionLogServiceBean.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse.actionlogging;
 
 import java.util.Date;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * A service bean that persists {@link ActionLogRecord}s to the DB.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
index 51f6f05f326..58565bcc9d6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java
@@ -30,8 +30,6 @@
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.GuestUser;
-import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
 import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleServiceBean;
@@ -43,45 +41,43 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.metrics.MetricsServiceBean;
-import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.locality.StorageSiteServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
-import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParser;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
-import java.io.StringReader;
+import java.io.InputStream;
 import java.net.URI;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.Enumeration;
 import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonException;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.*;
+import jakarta.ws.rs.core.Response.ResponseBuilder;
+import jakarta.ws.rs.core.Response.Status;
+
 import static org.apache.commons.lang3.StringUtils.isNumeric;
 
 /**
@@ -94,10 +90,9 @@ public abstract class AbstractApiBean {
     private static final String DATAVERSE_KEY_HEADER_NAME = "X-Dataverse-key";
     private static final String PERSISTENT_ID_KEY=":persistentId";
     private static final String ALIAS_KEY=":alias";
-    public static final String STATUS_ERROR = "ERROR";
-    public static final String STATUS_OK = "OK";
     public static final String STATUS_WF_IN_PROGRESS = "WORKFLOW_IN_PROGRESS";
     public static final String DATAVERSE_WORKFLOW_INVOCATION_HEADER_NAME = "X-Dataverse-invocationID";
+    public static final String RESPONSE_MESSAGE_AUTHENTICATED_USER_REQUIRED = "Only authenticated users can perform the requested operation";
 
     /**
      * Utility class to convey a proper error response using Java's exceptions.
@@ -139,23 +134,21 @@ public Response refineResponse( String message ) {
          * In the common case of the wrapped response being of type JSON,
          * return the message field it has (if any).
          * @return the content of a message field, or {@code null}.
+         * @throws JsonException when JSON parsing fails.
          */
         String getWrappedMessageWhenJson() {
             if ( response.getMediaType().equals(MediaType.APPLICATION_JSON_TYPE) ) {
                 Object entity = response.getEntity();
                 if ( entity == null ) return null;
 
-                String json = entity.toString();
-                try ( StringReader rdr = new StringReader(json) ){
-                    JsonReader jrdr = Json.createReader(rdr);
-                    JsonObject obj = jrdr.readObject();
-                    if ( obj.containsKey("message") ) {
-                        JsonValue message = obj.get("message");
-                        return message.getValueType() == ValueType.STRING ? obj.getString("message") : message.toString();
-                    } else {
-                        return null;
-                    }
+                JsonObject obj = JsonUtil.getJsonObject(entity.toString());
+                if ( obj.containsKey("message") ) {
+                    JsonValue message = obj.get("message");
+                    return message.getValueType() == ValueType.STRING ? obj.getString("message") : message.toString();
+                } else {
+                    return null;
                 }
+
             } else {
                 return null;
             }
@@ -165,6 +158,9 @@ String getWrappedMessageWhenJson() {
     @EJB
     protected EjbDataverseEngine engineSvc;
 
+    @EJB
+    protected DvObjectServiceBean dvObjectSvc;
+    
     @EJB
     protected DatasetServiceBean datasetSvc;
     
@@ -210,9 +206,6 @@ String getWrappedMessageWhenJson() {
     @EJB
     protected SavedSearchServiceBean savedSearchSvc;
 
-    @EJB
-    protected PrivateUrlServiceBean privateUrlSvc;
-
     @EJB
     protected ConfirmEmailServiceBean confirmEmailSvc;
 
@@ -279,7 +272,7 @@ public JsonParser call() throws Exception {
     /**
      * Functional interface for handling HTTP requests in the APIs.
      *
-     * @see #response(edu.harvard.iq.dataverse.api.AbstractApiBean.DataverseRequestHandler)
+     * @see #response(edu.harvard.iq.dataverse.api.AbstractApiBean.DataverseRequestHandler, edu.harvard.iq.dataverse.authorization.users.User)
      */
     protected static interface DataverseRequestHandler {
         Response handle( DataverseRequest u ) throws WrappedResponse;
@@ -321,12 +314,30 @@ protected String getRequestApiKey() {
                 
         return headerParamApiKey!=null ? headerParamApiKey : queryParamApiKey;
     }
-    
-    protected String getRequestWorkflowInvocationID() {
-        String headerParamWFKey = httpRequest.getHeader(DATAVERSE_WORKFLOW_INVOCATION_HEADER_NAME);
-        String queryParamWFKey = httpRequest.getParameter("invocationID");
-                
-        return headerParamWFKey!=null ? headerParamWFKey : queryParamWFKey;
+
+    protected User getRequestUser(ContainerRequestContext crc) {
+        return (User) crc.getProperty(ApiConstants.CONTAINER_REQUEST_CONTEXT_USER);
+    }
+
+    /**
+     * Gets the authenticated user from the ContainerRequestContext user property. If the user from the property
+     * is not authenticated, throws a wrapped "authenticated user required" error (HTTP UNAUTHORIZED) response.
+     * @param crc a ContainerRequestContext implementation
+     * @return The authenticated user
+     * @throws edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse in case the user is not authenticated.
+     *
+     * TODO:
+     *  This method is designed to comply with existing authorization logic, based on the old findAuthenticatedUserOrDie method.
+     *  Ideally, as is done for authentication, a filter could be implemented for authorization, which would extract and encapsulate the
+     *  authorization logic from the AbstractApiBean.
+     */
+    protected AuthenticatedUser getRequestAuthenticatedUserOrDie(ContainerRequestContext crc) throws WrappedResponse {
+        User requestUser = (User) crc.getProperty(ApiConstants.CONTAINER_REQUEST_CONTEXT_USER);
+        if (requestUser.isAuthenticated()) {
+            return (AuthenticatedUser) requestUser;
+        } else {
+            throw new WrappedResponse(authenticatedUserRequired());
+        }
     }
 
     /* ========= *\
@@ -347,111 +358,13 @@ protected RoleAssignee findAssignee(String identifier) {
     }
 
     /**
-     *
      * @param apiKey the key to find the user with
      * @return the user, or null
-     * @see #findUserOrDie(java.lang.String)
      */
     protected AuthenticatedUser findUserByApiToken( String apiKey ) {
         return authSvc.lookupUser(apiKey);
     }
 
-    /**
-     * Returns the user of pointed by the API key, or the guest user
-     * @return a user, may be a guest user.
-     * @throws edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse iff there is an api key present, but it is invalid.
-     */
-    protected User findUserOrDie() throws WrappedResponse {
-        final String requestApiKey = getRequestApiKey();
-        final String requestWFKey = getRequestWorkflowInvocationID();
-        if (requestApiKey == null && requestWFKey == null && getRequestParameter(UrlSignerUtil.SIGNED_URL_TOKEN)==null) {
-            return GuestUser.get();
-        }
-        PrivateUrlUser privateUrlUser = privateUrlSvc.getPrivateUrlUserFromToken(requestApiKey);
-        // For privateUrlUsers restricted to anonymized access, all api calls are off-limits except for those used in the UI
-        // to download the file or image thumbs
-        if (privateUrlUser != null) {
-            if (privateUrlUser.hasAnonymizedAccess()) {
-                String pathInfo = httpRequest.getPathInfo();
-                String prefix= "/access/datafile/";
-                if (!(pathInfo.startsWith(prefix) && !pathInfo.substring(prefix.length()).contains("/"))) {
-                    logger.info("Anonymized access request for " + pathInfo);
-                    throw new WrappedResponse(error(Status.UNAUTHORIZED, "API Access not allowed with this Key"));
-                }
-            }
-            return privateUrlUser;
-        }
-        return findAuthenticatedUserOrDie(requestApiKey, requestWFKey);
-    }
-
-    /**
-     * Finds the authenticated user, based on (in order):
-     * <ol>
-     *  <li>The key in the HTTP header {@link #DATAVERSE_KEY_HEADER_NAME}</li>
-     *  <li>The key in the query parameter {@code key}
-     * </ol>
-     *
-     * If no user is found, throws a wrapped bad api key (HTTP UNAUTHORIZED) response.
-     *
-     * @return The authenticated user which owns the passed api key
-     * @throws edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse in case said user is not found.
-     */
-    protected AuthenticatedUser findAuthenticatedUserOrDie() throws WrappedResponse {
-        return findAuthenticatedUserOrDie(getRequestApiKey(), getRequestWorkflowInvocationID());
-    }
-
-
-    private AuthenticatedUser findAuthenticatedUserOrDie( String key, String wfid ) throws WrappedResponse {
-        if (key != null) {
-            // No check for deactivated user because it's done in authSvc.lookupUser.
-            AuthenticatedUser authUser = authSvc.lookupUser(key);
-
-            if (authUser != null) {
-                authUser = userSvc.updateLastApiUseTime(authUser);
-
-                return authUser;
-            }
-            else {
-                throw new WrappedResponse(badApiKey(key));
-            }
-        } else if (wfid != null) {
-            AuthenticatedUser authUser = authSvc.lookupUserForWorkflowInvocationID(wfid);
-            if (authUser != null) {
-                return authUser;
-            } else {
-                throw new WrappedResponse(badWFKey(wfid));
-            }
-        } else if (getRequestParameter(UrlSignerUtil.SIGNED_URL_TOKEN) != null) {
-            AuthenticatedUser authUser = getAuthenticatedUserFromSignedUrl();
-            if (authUser != null) {
-                return authUser;
-            }
-        }
-        //Just send info about the apiKey - workflow users will learn about invocationId elsewhere
-        throw new WrappedResponse(badApiKey(null));
-    }
-    
-    private AuthenticatedUser getAuthenticatedUserFromSignedUrl() {
-        AuthenticatedUser authUser = null;
-        // The signedUrl contains a param telling which user this is supposed to be for.
-        // We don't trust this. So we lookup that user, and get their API key, and use
-        // that as a secret in validating the signedURL. If the signature can't be
-        // validated with their key, the user (or their API key) has been changed and
-        // we reject the request.
-        // ToDo - add null checks/ verify that calling methods catch things.
-        String user = httpRequest.getParameter("user");
-        AuthenticatedUser targetUser = authSvc.getAuthenticatedUser(user);
-        String key = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("")
-                + authSvc.findApiTokenByUser(targetUser).getTokenString();
-        String signedUrl = httpRequest.getRequestURL().toString() + "?" + httpRequest.getQueryString();
-        String method = httpRequest.getMethod();
-        boolean validated = UrlSignerUtil.isValidUrl(signedUrl, user, method, key);
-        if (validated) {
-            authUser = targetUser;
-        }
-        return authUser;
-    }
-
     protected Dataverse findDataverseOrDie( String dvIdtf ) throws WrappedResponse {
         Dataverse dv = findDataverse(dvIdtf);
         if ( dv == null ) {
@@ -703,49 +616,39 @@ protected Response response( Callable<Response> hdl ) {
         } catch ( WrappedResponse rr ) {
             return rr.getResponse();
         } catch ( Exception ex ) {
-            String incidentId = UUID.randomUUID().toString();
-            logger.log(Level.SEVERE, "API internal error " + incidentId +": " + ex.getMessage(), ex);
-            return Response.status(500)
-                .entity( Json.createObjectBuilder()
-                             .add("status", "ERROR")
-                             .add("code", 500)
-                             .add("message", "Internal server error. More details available at the server logs.")
-                             .add("incidentId", incidentId)
-                        .build())
-                .type("application/json").build();
+            return handleDataverseRequestHandlerException(ex);
         }
     }
 
-    /**
-     * The preferred way of handling a request that requires a user. The system
-     * looks for the user and, if found, handles it to the handler for doing the
-     * actual work.
-     *
-     * This is a relatively secure way to handle things, since if the user is not
-     * found, the response is about the bad API key, rather than something else
-     * (say, 404 NOT FOUND which leaks information about the existence of the
-     * sought object).
+    /***
+     * The preferred way of handling a request that requires a user. The method
+     * receives a user and hands it to the handler for doing the actual work.
      *
      * @param hdl handling code block.
+     * @param user the associated request user.
      * @return HTTP Response appropriate for the way {@code hdl} executed.
      */
-    protected Response response( DataverseRequestHandler hdl ) {
+    protected Response response(DataverseRequestHandler hdl, User user) {
         try {
-            return hdl.handle(createDataverseRequest(findUserOrDie()));
+            return hdl.handle(createDataverseRequest(user));
         } catch ( WrappedResponse rr ) {
             return rr.getResponse();
         } catch ( Exception ex ) {
-            String incidentId = UUID.randomUUID().toString();
-            logger.log(Level.SEVERE, "API internal error " + incidentId +": " + ex.getMessage(), ex);
-            return Response.status(500)
-                .entity( Json.createObjectBuilder()
-                             .add("status", "ERROR")
-                             .add("code", 500)
-                             .add("message", "Internal server error. More details available at the server logs.")
-                             .add("incidentId", incidentId)
+            return handleDataverseRequestHandlerException(ex);
+        }
+    }
+
+    private Response handleDataverseRequestHandlerException(Exception ex) {
+        String incidentId = UUID.randomUUID().toString();
+        logger.log(Level.SEVERE, "API internal error " + incidentId +": " + ex.getMessage(), ex);
+        return Response.status(500)
+                .entity(Json.createObjectBuilder()
+                        .add("status", "ERROR")
+                        .add("code", 500)
+                        .add("message", "Internal server error. More details available at the server logs.")
+                        .add("incidentId", incidentId)
                         .build())
                 .type("application/json").build();
-        }
     }
 
     /* ====================== *\
@@ -754,21 +657,21 @@ protected Response response( DataverseRequestHandler hdl ) {
 
     protected Response ok( JsonArrayBuilder bld ) {
         return Response.ok(Json.createObjectBuilder()
-            .add("status", STATUS_OK)
+            .add("status", ApiConstants.STATUS_OK)
             .add("data", bld).build())
             .type(MediaType.APPLICATION_JSON).build();
     }
     
     protected Response ok( JsonArray ja ) {
         return Response.ok(Json.createObjectBuilder()
-            .add("status", STATUS_OK)
+            .add("status", ApiConstants.STATUS_OK)
             .add("data", ja).build())
             .type(MediaType.APPLICATION_JSON).build();
     }
 
     protected Response ok( JsonObjectBuilder bld ) {
         return Response.ok( Json.createObjectBuilder()
-            .add("status", STATUS_OK)
+            .add("status", ApiConstants.STATUS_OK)
             .add("data", bld).build() )
             .type(MediaType.APPLICATION_JSON)
             .build();
@@ -776,7 +679,7 @@ protected Response ok( JsonObjectBuilder bld ) {
     
     protected Response ok( JsonObject jo ) {
         return Response.ok( Json.createObjectBuilder()
-                .add("status", STATUS_OK)
+                .add("status", ApiConstants.STATUS_OK)
                 .add("data", jo).build() )
                 .type(MediaType.APPLICATION_JSON)
                 .build();    
@@ -784,7 +687,7 @@ protected Response ok( JsonObject jo ) {
 
     protected Response ok( String msg ) {
         return Response.ok().entity(Json.createObjectBuilder()
-            .add("status", STATUS_OK)
+            .add("status", ApiConstants.STATUS_OK)
             .add("data", Json.createObjectBuilder().add("message",msg)).build() )
             .type(MediaType.APPLICATION_JSON)
             .build();
@@ -792,7 +695,7 @@ protected Response ok( String msg ) {
     
     protected Response ok( String msg, JsonObjectBuilder bld  ) {
         return Response.ok().entity(Json.createObjectBuilder()
-            .add("status", STATUS_OK)
+            .add("status", ApiConstants.STATUS_OK)
             .add("message", Json.createObjectBuilder().add("message",msg))     
             .add("data", bld).build())      
             .type(MediaType.APPLICATION_JSON)
@@ -801,10 +704,16 @@ protected Response ok( String msg, JsonObjectBuilder bld  ) {
 
     protected Response ok( boolean value ) {
         return Response.ok().entity(Json.createObjectBuilder()
-            .add("status", STATUS_OK)
+            .add("status", ApiConstants.STATUS_OK)
             .add("data", value).build() ).build();
     }
 
+    protected Response ok(long value) {
+        return Response.ok().entity(Json.createObjectBuilder()
+                .add("status", ApiConstants.STATUS_OK)
+                .add("data", value).build()).build();
+    }
+
     /**
      * @param data Payload to return.
      * @param mediaType Non-JSON media type.
@@ -819,6 +728,11 @@ protected Response ok(String data, MediaType mediaType, String downloadFilename)
         return res.build();
     }
 
+    protected Response ok(InputStream inputStream) {
+        ResponseBuilder res = Response.ok().entity(inputStream).type(MediaType.valueOf(FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT));
+        return res.build();
+    }
+
     protected Response created( String uri, JsonObjectBuilder bld ) {
         return Response.created( URI.create(uri) )
                 .entity( Json.createObjectBuilder()
@@ -858,16 +772,11 @@ protected Response forbidden( String msg ) {
     protected Response conflict( String msg ) {
         return error( Status.CONFLICT, msg );
     }
-    
-    protected Response badApiKey( String apiKey ) {
-        return error(Status.UNAUTHORIZED, (apiKey != null ) ? "Bad api key " : "Please provide a key query parameter (?key=XXX) or via the HTTP header " + DATAVERSE_KEY_HEADER_NAME);
-    }
 
-    protected Response badWFKey( String wfId ) {
-        String message = (wfId != null ) ? "Bad workflow invocationId " : "Please provide an invocationId query parameter (?invocationId=XXX) or via the HTTP header " + DATAVERSE_WORKFLOW_INVOCATION_HEADER_NAME;
-        return error(Status.UNAUTHORIZED, message );
+    protected Response authenticatedUserRequired() {
+        return error(Status.UNAUTHORIZED, RESPONSE_MESSAGE_AUTHENTICATED_USER_REQUIRED);
     }
-    
+
     protected Response permissionError( PermissionException pe ) {
         return permissionError( pe.getMessage() );
     }
@@ -883,7 +792,7 @@ protected Response unauthorized( String message ) {
     protected static Response error( Status sts, String msg ) {
         return Response.status(sts)
                 .entity( NullSafeJsonBuilder.jsonObjectBuilder()
-                        .add("status", STATUS_ERROR)
+                        .add("status", ApiConstants.STATUS_ERROR)
                         .add( "message", msg ).build()
                 ).type(MediaType.APPLICATION_JSON_TYPE).build();
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 3bd0a19672b..297ec2d3681 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -10,6 +10,7 @@
 import edu.harvard.iq.dataverse.AuxiliaryFileServiceBean;
 import edu.harvard.iq.dataverse.DataCitation;
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.FileAccessRequest;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.Dataset;
@@ -30,8 +31,11 @@
 import edu.harvard.iq.dataverse.RoleAssignment;
 import edu.harvard.iq.dataverse.UserNotification;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
+import edu.harvard.iq.dataverse.ThemeWidgetFragment;
+
 import static edu.harvard.iq.dataverse.api.Datasets.handleVersion;
+
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
@@ -43,6 +47,7 @@
 import edu.harvard.iq.dataverse.dataaccess.DataAccessRequest;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
 import edu.harvard.iq.dataverse.dataaccess.DataFileZipper;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
 import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
@@ -70,7 +75,7 @@
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.InputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -85,46 +90,46 @@
 import java.util.List;
 import java.util.Properties;
 import java.util.logging.Level;
-import javax.inject.Inject;
-import javax.json.Json;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
 import java.net.URI;
-import javax.json.JsonArrayBuilder;
-import javax.persistence.TypedQuery;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
-
-
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.ForbiddenException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import javax.ws.rs.core.StreamingOutput;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.persistence.TypedQuery;
+
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
+
+
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.ForbiddenException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ws.rs.core.StreamingOutput;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import java.net.URISyntaxException;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.RedirectionException;
-import javax.ws.rs.ServerErrorException;
-import javax.ws.rs.core.MediaType;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.RedirectionException;
+import jakarta.ws.rs.ServerErrorException;
+import jakarta.ws.rs.core.MediaType;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataParam;
 
@@ -191,10 +196,11 @@ public class Access extends AbstractApiBean {
     
     // TODO: 
     // versions? -- L.A. 4.0 beta 10
-    @Path("datafile/bundle/{fileId}")
     @GET
+    @AuthRequired
+    @Path("datafile/bundle/{fileId}")
     @Produces({"application/zip"})
-    public BundleDownloadInstance datafileBundle(@PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId,@QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public BundleDownloadInstance datafileBundle(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId,@QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
  
 
         GuestbookResponse gbr = null;
@@ -202,12 +208,12 @@ public BundleDownloadInstance datafileBundle(@PathParam("fileId") String fileId,
         DataFile df = findDataFileOrDieWrapper(fileId);
         
         // This will throw a ForbiddenException if access isn't authorized: 
-        checkAuthorization(df);
+        checkAuthorization(getRequestUser(crc), df);
         
         if (gbrecs != true && df.isReleased()){
             // Write Guestbook record if not done previously and file is released
             //This calls findUserOrDie which will retrieve the key param or api token header, or the workflow token header.
-            User apiTokenUser = findAPITokenUser();
+            User apiTokenUser = findAPITokenUser(getRequestUser(crc));
             gbr = guestbookResponseService.initAPIGuestbookResponse(df.getOwner(), df, session, apiTokenUser);
             guestbookResponseService.save(gbr);
             MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, df);
@@ -269,10 +275,11 @@ private DataFile findDataFileOrDieWrapper(String fileId){
     }
         
             
-    @Path("datafile/{fileId:.+}")
     @GET
+    @AuthRequired
+    @Path("datafile/{fileId:.+}")
     @Produces({"application/xml"})
-    public Response datafile(@PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public Response datafile(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
         
         // check first if there's a trailing slash, and chop it: 
         while (fileId.lastIndexOf('/') == fileId.length() - 1) {
@@ -299,11 +306,11 @@ public Response datafile(@PathParam("fileId") String fileId, @QueryParam("gbrecs
         }
                
         // This will throw a ForbiddenException if access isn't authorized: 
-        checkAuthorization(df);
+        checkAuthorization(getRequestUser(crc), df);
 
         if (gbrecs != true && df.isReleased()){
             // Write Guestbook record if not done previously and file is released
-            User apiTokenUser = findAPITokenUser();
+            User apiTokenUser = findAPITokenUser(getRequestUser(crc));
             gbr = guestbookResponseService.initAPIGuestbookResponse(df.getOwner(), df, session, apiTokenUser);
         }
 
@@ -322,8 +329,8 @@ public Response datafile(@PathParam("fileId") String fileId, @QueryParam("gbrecs
             dInfo.addServiceAvailable(new OptionalAccessService("preprocessed", "application/json", "format=prep", "Preprocessed data in JSON"));
             dInfo.addServiceAvailable(new OptionalAccessService("subset", "text/tab-separated-values", "variables=&lt;LIST&gt;", "Column-wise Subsetting"));
         }
-        
-        if(systemConfig.isGlobusFileDownload() && systemConfig.getGlobusStoresList().contains(DataAccess.getStorageDriverFromIdentifier(df.getStorageIdentifier()))) {
+        String driverId = DataAccess.getStorageDriverFromIdentifier(df.getStorageIdentifier());
+        if(systemConfig.isGlobusFileDownload() && (GlobusAccessibleStore.acceptsGlobusTransfers(driverId) || GlobusAccessibleStore.allowsGlobusReferences(driverId))) {
             dInfo.addServiceAvailable(new OptionalAccessService("GlobusTransfer", df.getContentType(), "format=GlobusTransfer", "Download via Globus"));
         }
         
@@ -432,11 +439,12 @@ public Response datafile(@PathParam("fileId") String fileId, @QueryParam("gbrecs
     
     
     // Metadata format defaults to DDI:
-    @Path("datafile/{fileId}/metadata")
     @GET
+    @AuthRequired
+    @Path("datafile/{fileId}/metadata")
     @Produces({"text/xml"})
-    public String tabularDatafileMetadata(@PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
-        return tabularDatafileMetadataDDI(fileId, fileMetadataId, exclude, include, header, response);
+    public String tabularDatafileMetadata(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
+        return tabularDatafileMetadataDDI(crc, fileId, fileMetadataId, exclude, include, header, response);
     }
     
     /* 
@@ -444,9 +452,10 @@ public String tabularDatafileMetadata(@PathParam("fileId") String fileId, @Query
      * which we are going to retire.
      */
     @Path("datafile/{fileId}/metadata/ddi")
+    @AuthRequired
     @GET
     @Produces({"text/xml"})
-    public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId,  @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
+    public String tabularDatafileMetadataDDI(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @QueryParam("fileMetadataId") Long fileMetadataId, @QueryParam("exclude") String exclude, @QueryParam("include") String include, @Context HttpHeaders header, @Context HttpServletResponse response) throws NotFoundException, ServiceUnavailableException /*, PermissionDeniedException, AuthorizationRequiredException*/ {
         String retValue = "";
 
         DataFile dataFile = null; 
@@ -461,11 +470,7 @@ public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId,  @Q
         if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) {
             boolean hasPermissionToDownloadFile = false;
             DataverseRequest dataverseRequest;
-            try {
-                dataverseRequest = createDataverseRequest(findUserOrDie());
-            } catch (WrappedResponse ex) {
-                throw new BadRequestException("cannot find user");
-            }
+            dataverseRequest = createDataverseRequest(getRequestUser(crc));
             if (dataverseRequest != null && dataverseRequest.getUser() instanceof GuestUser) {
                 // We must be in the UI. Try to get a non-GuestUser from the session.
                 dataverseRequest = dvRequestService.getDataverseRequest();
@@ -519,30 +524,34 @@ public String tabularDatafileMetadataDDI(@PathParam("fileId") String fileId,  @Q
      * a tabular datafile.
      */
     
-    @Path("datafile/{fileId}/auxiliary")
     @GET
-    public Response listDatafileMetadataAux(@PathParam("fileId") String fileId,
-            @Context UriInfo uriInfo,
-            @Context HttpHeaders headers,
-            @Context HttpServletResponse response) throws ServiceUnavailableException {
-        return listAuxiliaryFiles(fileId, null, uriInfo, headers, response);
+    @AuthRequired
+    @Path("datafile/{fileId}/auxiliary")
+    public Response listDatafileMetadataAux(@Context ContainerRequestContext crc,
+                                            @PathParam("fileId") String fileId,
+                                            @Context UriInfo uriInfo,
+                                            @Context HttpHeaders headers,
+                                            @Context HttpServletResponse response) throws ServiceUnavailableException {
+        return listAuxiliaryFiles(getRequestUser(crc), fileId, null, uriInfo, headers, response);
     }
     /*
      * GET method for retrieving a list auxiliary files associated with
      * a tabular datafile and having the specified origin.
      */
-    
-    @Path("datafile/{fileId}/auxiliary/{origin}")
+
     @GET
-    public Response listDatafileMetadataAuxByOrigin(@PathParam("fileId") String fileId,
-            @PathParam("origin") String origin,
-            @Context UriInfo uriInfo,
-            @Context HttpHeaders headers,
-            @Context HttpServletResponse response) throws ServiceUnavailableException {
-        return listAuxiliaryFiles(fileId, origin, uriInfo, headers, response);
+    @AuthRequired
+    @Path("datafile/{fileId}/auxiliary/{origin}")
+    public Response listDatafileMetadataAuxByOrigin(@Context ContainerRequestContext crc,
+                                                    @PathParam("fileId") String fileId,
+                                                    @PathParam("origin") String origin,
+                                                    @Context UriInfo uriInfo,
+                                                    @Context HttpHeaders headers,
+                                                    @Context HttpServletResponse response) throws ServiceUnavailableException {
+        return listAuxiliaryFiles(getRequestUser(crc), fileId, origin, uriInfo, headers, response);
     } 
     
-    private Response listAuxiliaryFiles(String fileId, String origin, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response) {
+    private Response listAuxiliaryFiles(User user, String fileId, String origin, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response) {
           DataFile df = findDataFileOrDieWrapper(fileId);
 
         List<AuxiliaryFile> auxFileList = auxiliaryFileService.findAuxiliaryFiles(df, origin);
@@ -550,7 +559,7 @@ private Response listAuxiliaryFiles(String fileId, String origin, UriInfo uriInf
         if (auxFileList == null || auxFileList.isEmpty()) {
             throw new NotFoundException("No Auxiliary files exist for datafile " + fileId + (origin==null ? "": " and the specified origin"));
         }
-        boolean isAccessAllowed = isAccessAuthorized(df);
+        boolean isAccessAllowed = isAccessAuthorized(user, df);
         JsonArrayBuilder jab = Json.createArrayBuilder();
         auxFileList.forEach(auxFile -> {
             if (isAccessAllowed || auxFile.getIsPublic()) {
@@ -573,14 +582,16 @@ private Response listAuxiliaryFiles(String fileId, String origin, UriInfo uriInf
      *
      */
     
+    @GET
+    @AuthRequired
     @Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}")
-    @GET    
-    public DownloadInstance downloadAuxiliaryFile(@PathParam("fileId") String fileId,
-            @PathParam("formatTag") String formatTag,
-            @PathParam("formatVersion") String formatVersion,
-            @Context UriInfo uriInfo, 
-            @Context HttpHeaders headers, 
-            @Context HttpServletResponse response) throws ServiceUnavailableException {
+    public DownloadInstance downloadAuxiliaryFile(@Context ContainerRequestContext crc,
+                                                  @PathParam("fileId") String fileId,
+                                                  @PathParam("formatTag") String formatTag,
+                                                  @PathParam("formatVersion") String formatVersion,
+                                                  @Context UriInfo uriInfo,
+                                                  @Context HttpHeaders headers,
+                                                  @Context HttpServletResponse response) throws ServiceUnavailableException {
     
         DataFile df = findDataFileOrDieWrapper(fileId);
         
@@ -633,7 +644,7 @@ public DownloadInstance downloadAuxiliaryFile(@PathParam("fileId") String fileId
         // as defined for the DataFile itself), and will throw a ForbiddenException 
         // if access is denied:
         if (!publiclyAvailable) {
-            checkAuthorization(df);
+            checkAuthorization(getRequestUser(crc), df);
         }
         
         return downloadInstance;
@@ -645,22 +656,24 @@ public DownloadInstance downloadAuxiliaryFile(@PathParam("fileId") String fileId
     
     // TODO: Rather than only supporting looking up files by their database IDs,
     // consider supporting persistent identifiers.
-    @Path("datafiles")
     @POST
+    @AuthRequired
+    @Path("datafiles")
     @Consumes("text/plain")
     @Produces({ "application/zip" })
-    public Response postDownloadDatafiles(String fileIds, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
+    public Response postDownloadDatafiles(@Context ContainerRequestContext crc, String fileIds, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
         
 
-        return downloadDatafiles(fileIds, gbrecs, uriInfo, headers, response);
+        return downloadDatafiles(getRequestUser(crc), fileIds, gbrecs, uriInfo, headers, response);
     }
 
-    @Path("dataset/{id}")
     @GET
+    @AuthRequired
+    @Path("dataset/{id}")
     @Produces({"application/zip"})
-    public Response downloadAllFromLatest(@PathParam("id") String datasetIdOrPersistentId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
+    public Response downloadAllFromLatest(@Context ContainerRequestContext crc, @PathParam("id") String datasetIdOrPersistentId, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
         try {
-            User user = findUserOrDie(); 
+            User user = getRequestUser(crc);
             DataverseRequest req = createDataverseRequest(user);
             final Dataset retrieved = findDatasetOrDie(datasetIdOrPersistentId);
             if (!(user instanceof GuestUser)) {
@@ -672,7 +685,7 @@ public Response downloadAllFromLatest(@PathParam("id") String datasetIdOrPersist
                     // We don't want downloads from Draft versions to be counted, 
                     // so we are setting the gbrecs (aka "do not write guestbook response") 
                     // variable accordingly:
-                    return downloadDatafiles(fileIds, true, uriInfo, headers, response);
+                    return downloadDatafiles(getRequestUser(crc), fileIds, true, uriInfo, headers, response);
                 }
             }
             
@@ -693,18 +706,19 @@ public Response downloadAllFromLatest(@PathParam("id") String datasetIdOrPersist
             }
             
             String fileIds = getFileIdsAsCommaSeparated(latest.getFileMetadatas());
-            return downloadDatafiles(fileIds, gbrecs, uriInfo, headers, response);
+            return downloadDatafiles(getRequestUser(crc), fileIds, gbrecs, uriInfo, headers, response);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
     }
 
-    @Path("dataset/{id}/versions/{versionId}")
     @GET
+    @AuthRequired
+    @Path("dataset/{id}/versions/{versionId}")
     @Produces({"application/zip"})
-    public Response downloadAllFromVersion(@PathParam("id") String datasetIdOrPersistentId, @PathParam("versionId") String versionId, @QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiTokenParam, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
+    public Response downloadAllFromVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetIdOrPersistentId, @PathParam("versionId") String versionId, @QueryParam("gbrecs") boolean gbrecs, @QueryParam("key") String apiTokenParam, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
         try {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             final Dataset ds = execCommand(new GetDatasetCommand(req, findDatasetOrDie(datasetIdOrPersistentId)));
             DatasetVersion dsv = execCommand(handleVersion(versionId, new Datasets.DsVersionHandler<Command<DatasetVersion>>() {
 
@@ -742,7 +756,7 @@ public Command<DatasetVersion> handleLatestPublished() {
             if (dsv.isDraft()) {
                 gbrecs = true;
             }
-            return downloadDatafiles(fileIds, gbrecs, uriInfo, headers, response);
+            return downloadDatafiles(getRequestUser(crc), fileIds, gbrecs, uriInfo, headers, response);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -760,14 +774,15 @@ private static String getFileIdsAsCommaSeparated(List<FileMetadata> fileMetadata
     /*
      * API method for downloading zipped bundles of multiple files:
      */
-    @Path("datafiles/{fileIds}")
     @GET
+    @AuthRequired
+    @Path("datafiles/{fileIds}")
     @Produces({"application/zip"})
-    public Response datafiles(@PathParam("fileIds") String fileIds, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
-        return downloadDatafiles(fileIds, gbrecs, uriInfo, headers, response);
+    public Response datafiles(@Context ContainerRequestContext crc, @PathParam("fileIds") String fileIds, @QueryParam("gbrecs") boolean gbrecs, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WebApplicationException {
+        return downloadDatafiles(getRequestUser(crc), fileIds, gbrecs, uriInfo, headers, response);
     }
 
-    private Response downloadDatafiles(String rawFileIds, boolean donotwriteGBResponse, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response) throws WebApplicationException /* throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
+    private Response downloadDatafiles(User user, String rawFileIds, boolean donotwriteGBResponse, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response) throws WebApplicationException /* throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/ {
         final long zipDownloadSizeLimit = systemConfig.getZipDownloadLimit();
                 
         logger.fine("setting zip download size limit to " + zipDownloadSizeLimit + " bytes.");
@@ -789,7 +804,7 @@ private Response downloadDatafiles(String rawFileIds, boolean donotwriteGBRespon
         String customZipServiceUrl = settingsService.getValueForKey(SettingsServiceBean.Key.CustomZipDownloadServiceUrl);
         boolean useCustomZipService = customZipServiceUrl != null; 
         
-        User apiTokenUser = findAPITokenUser(); //for use in adding gb records if necessary
+        User apiTokenUser = findAPITokenUser(user); //for use in adding gb records if necessary
         
         Boolean getOrig = false;
         for (String key : uriInfo.getQueryParameters().keySet()) {
@@ -802,7 +817,7 @@ private Response downloadDatafiles(String rawFileIds, boolean donotwriteGBRespon
         if (useCustomZipService) {
             URI redirect_uri = null; 
             try {
-                redirect_uri = handleCustomZipDownload(customZipServiceUrl, fileIds, apiTokenUser, uriInfo, headers, donotwriteGBResponse, true); 
+                redirect_uri = handleCustomZipDownload(user, customZipServiceUrl, fileIds, apiTokenUser, uriInfo, headers, donotwriteGBResponse, true);
             } catch (WebApplicationException wae) {
                 throw wae;
             }
@@ -842,8 +857,8 @@ public void write(OutputStream os) throws IOException,
                             logger.fine("attempting to look up file id " + fileId);
                             DataFile file = dataFileService.find(fileId);
                             if (file != null) {
-                                if (isAccessAuthorized(file)) { 
-                                    
+                                if (isAccessAuthorized(user, file)) {
+
                                     logger.fine("adding datafile (id=" + file.getId() + ") to the download list of the ZippedDownloadInstance.");
                                     //downloadInstance.addDataFile(file);
                                     if (donotwriteGBResponse != true && file.isReleased()){
@@ -1183,16 +1198,7 @@ private File getLogo(Dataverse dataverse) {
         
         DataverseTheme theme = dataverse.getDataverseTheme(); 
         if (theme != null && theme.getLogo() != null && !theme.getLogo().equals("")) {
-            Properties p = System.getProperties();
-            String domainRoot = p.getProperty("com.sun.aas.instanceRoot");
-  
-            if (domainRoot != null && !"".equals(domainRoot)) {
-                return new File (domainRoot + File.separator + 
-                    "docroot" + File.separator + 
-                    "logos" + File.separator + 
-                    dataverse.getLogoOwnerId() + File.separator + 
-                    theme.getLogo());
-            }
+            return ThemeWidgetFragment.getLogoDir(dataverse.getLogoOwnerId()).resolve(theme.getLogo()).toFile();
         }
             
         return null;         
@@ -1235,23 +1241,22 @@ private String getWebappImageResource(String imageName) {
      * @return 
      *
      */
-    @Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}")
     @POST
+    @AuthRequired
+    @Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
-
-    public Response saveAuxiliaryFileWithVersion(@PathParam("fileId") Long fileId,
-            @PathParam("formatTag") String formatTag,
-            @PathParam("formatVersion") String formatVersion,
-            @FormDataParam("origin") String origin,
-            @FormDataParam("isPublic") boolean isPublic,
-            @FormDataParam("type") String type,
-            @FormDataParam("file") final FormDataBodyPart formDataBodyPart,
-            @FormDataParam("file") InputStream fileInputStream
-          
-    ) {
+    public Response saveAuxiliaryFileWithVersion(@Context ContainerRequestContext crc,
+                                                 @PathParam("fileId") Long fileId,
+                                                 @PathParam("formatTag") String formatTag,
+                                                 @PathParam("formatVersion") String formatVersion,
+                                                 @FormDataParam("origin") String origin,
+                                                 @FormDataParam("isPublic") boolean isPublic,
+                                                 @FormDataParam("type") String type,
+                                                 @FormDataParam("file") final FormDataBodyPart formDataBodyPart,
+                                                 @FormDataParam("file") InputStream fileInputStream) {
         AuthenticatedUser authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(FORBIDDEN, "Authorized users only.");
         }
@@ -1293,14 +1298,16 @@ public Response saveAuxiliaryFileWithVersion(@PathParam("fileId") Long fileId,
      * @param formDataBodyPart
      * @return 
      */
-    @Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}")
     @DELETE
-    public Response deleteAuxiliaryFileWithVersion(@PathParam("fileId") Long fileId,
-            @PathParam("formatTag") String formatTag,
-            @PathParam("formatVersion") String formatVersion) {
+    @AuthRequired
+    @Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}")
+    public Response deleteAuxiliaryFileWithVersion(@Context ContainerRequestContext crc,
+                                                   @PathParam("fileId") Long fileId,
+                                                   @PathParam("formatTag") String formatTag,
+                                                   @PathParam("formatVersion") String formatVersion) {
         AuthenticatedUser authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(FORBIDDEN, "Authorized users only.");
         }
@@ -1337,8 +1344,9 @@ public Response deleteAuxiliaryFileWithVersion(@PathParam("fileId") Long fileId,
      * @return
      */
     @PUT
+    @AuthRequired
     @Path("{id}/allowAccessRequest")
-    public Response allowAccessRequest(@PathParam("id") String datasetToAllowAccessId, String requestStr) {
+    public Response allowAccessRequest(@Context ContainerRequestContext crc, @PathParam("id") String datasetToAllowAccessId, String requestStr) {
 
         DataverseRequest dataverseRequest = null;
         Dataset dataset;
@@ -1352,12 +1360,7 @@ public Response allowAccessRequest(@PathParam("id") String datasetToAllowAccessI
 
         boolean allowRequest = Boolean.valueOf(requestStr);
 
-        try {
-            dataverseRequest = createDataverseRequest(findUserOrDie());
-        } catch (WrappedResponse wr) {
-            List<String> args = Arrays.asList(wr.getLocalizedMessage());
-            return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args));
-        }
+        dataverseRequest = createDataverseRequest(getRequestUser(crc));
 
         dataset.getOrCreateEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(allowRequest);
 
@@ -1379,14 +1382,15 @@ public Response allowAccessRequest(@PathParam("id") String datasetToAllowAccessI
      *
      * @author sekmiller
      *
+     * @param crc
      * @param fileToRequestAccessId
-     * @param apiToken
      * @param headers
      * @return
      */
     @PUT
+    @AuthRequired
     @Path("/datafile/{id}/requestAccess")
-    public Response requestFileAccess(@PathParam("id") String fileToRequestAccessId, @Context HttpHeaders headers) {
+    public Response requestFileAccess(@Context ContainerRequestContext crc, @PathParam("id") String fileToRequestAccessId, @Context HttpHeaders headers) {
         
         DataverseRequest dataverseRequest;
         DataFile dataFile;
@@ -1405,18 +1409,18 @@ public Response requestFileAccess(@PathParam("id") String fileToRequestAccessId,
         AuthenticatedUser requestor;
 
         try {
-            requestor = findAuthenticatedUserOrDie();
+            requestor = getRequestAuthenticatedUserOrDie(crc);
             dataverseRequest = createDataverseRequest(requestor);
         } catch (WrappedResponse wr) {
             List<String> args = Arrays.asList(wr.getLocalizedMessage());
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args));
         }
         //Already have access
-        if (isAccessAuthorized(dataFile)) {
+        if (isAccessAuthorized(getRequestUser(crc), dataFile)) {
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.failure.invalidRequest"));
         }
 
-        if (dataFile.getFileAccessRequesters().contains(requestor)) {
+        if (dataFile.containsActiveFileAccessRequestFromUser(requestor)) {
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.failure.requestExists"));
         }
 
@@ -1443,8 +1447,9 @@ public Response requestFileAccess(@PathParam("id") String fileToRequestAccessId,
      * @return
      */
     @GET
+    @AuthRequired
     @Path("/datafile/{id}/listRequests")
-    public Response listFileAccessRequests(@PathParam("id") String fileToRequestAccessId, @Context HttpHeaders headers) {
+    public Response listFileAccessRequests(@Context ContainerRequestContext crc, @PathParam("id") String fileToRequestAccessId, @Context HttpHeaders headers) {
 
         DataverseRequest dataverseRequest;
 
@@ -1457,7 +1462,7 @@ public Response listFileAccessRequests(@PathParam("id") String fileToRequestAcce
         }
 
         try {
-            dataverseRequest = createDataverseRequest(findAuthenticatedUserOrDie());
+            dataverseRequest = createDataverseRequest(getRequestAuthenticatedUserOrDie(crc));
         } catch (WrappedResponse wr) {
             List<String> args = Arrays.asList(wr.getLocalizedMessage());
             return error(UNAUTHORIZED, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args));
@@ -1466,17 +1471,17 @@ public Response listFileAccessRequests(@PathParam("id") String fileToRequestAcce
             return error(FORBIDDEN, BundleUtil.getStringFromBundle("access.api.rejectAccess.failure.noPermissions"));
         }
 
-        List<AuthenticatedUser> requesters = dataFile.getFileAccessRequesters();
+        List<FileAccessRequest> requests = dataFile.getFileAccessRequests(FileAccessRequest.RequestState.CREATED);
 
-        if (requesters == null || requesters.isEmpty()) {
+        if (requests == null || requests.isEmpty()) {
             List<String> args = Arrays.asList(dataFile.getDisplayName());
-            return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestList.noRequestsFound", args));
+            return error(Response.Status.NOT_FOUND, BundleUtil.getStringFromBundle("access.api.requestList.noRequestsFound", args));
         }
 
         JsonArrayBuilder userArray = Json.createArrayBuilder();
 
-        for (AuthenticatedUser au : requesters) {
-            userArray.add(json(au));
+        for (FileAccessRequest fileAccessRequest : requests) {
+            userArray.add(json(fileAccessRequest.getRequester()));
         }
 
         return ok(userArray);
@@ -1488,15 +1493,16 @@ public Response listFileAccessRequests(@PathParam("id") String fileToRequestAcce
      *
      * @author sekmiller
      *
+     * @param crc
      * @param fileToRequestAccessId
      * @param identifier
-     * @param apiToken
      * @param headers
      * @return
      */
     @PUT
+    @AuthRequired
     @Path("/datafile/{id}/grantAccess/{identifier}")
-    public Response grantFileAccess(@PathParam("id") String fileToRequestAccessId, @PathParam("identifier") String identifier, @Context HttpHeaders headers) {
+    public Response grantFileAccess(@Context ContainerRequestContext crc, @PathParam("id") String fileToRequestAccessId, @PathParam("identifier") String identifier, @Context HttpHeaders headers) {
         
         DataverseRequest dataverseRequest;
         DataFile dataFile;
@@ -1515,18 +1521,15 @@ public Response grantFileAccess(@PathParam("id") String fileToRequestAccessId, @
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.grantAccess.noAssigneeFound", args));
         }
 
-        try {
-            dataverseRequest = createDataverseRequest(findUserOrDie());
-        } catch (WrappedResponse wr) {
-            List<String> args = Arrays.asList(identifier);
-            return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args));
-        }
+        dataverseRequest = createDataverseRequest(getRequestUser(crc));
 
         DataverseRole fileDownloaderRole = roleService.findBuiltinRoleByAlias(DataverseRole.FILE_DOWNLOADER);
 
         try {
             engineSvc.submit(new AssignRoleCommand(ra, fileDownloaderRole, dataFile, dataverseRequest, null));
-            if (dataFile.getFileAccessRequesters().remove(ra)) {
+            FileAccessRequest far = dataFile.getAccessRequestForAssignee(ra);
+            if(far!=null) {
+                far.setStateGranted();
                 dataFileService.save(dataFile);
             }
 
@@ -1552,15 +1555,16 @@ public Response grantFileAccess(@PathParam("id") String fileToRequestAccessId, @
      *
      * @author sekmiller
      *
+     * @param crc
      * @param fileToRequestAccessId
      * @param identifier
-     * @param apiToken
      * @param headers
      * @return
      */
     @DELETE
+    @AuthRequired
     @Path("/datafile/{id}/revokeAccess/{identifier}")
-    public Response revokeFileAccess(@PathParam("id") String fileToRequestAccessId, @PathParam("identifier") String identifier, @Context HttpHeaders headers) {
+    public Response revokeFileAccess(@Context ContainerRequestContext crc, @PathParam("id") String fileToRequestAccessId, @PathParam("identifier") String identifier, @Context HttpHeaders headers) {
 
         DataverseRequest dataverseRequest;
         DataFile dataFile;
@@ -1572,12 +1576,7 @@ public Response revokeFileAccess(@PathParam("id") String fileToRequestAccessId,
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.fileNotFound", args));
         }
 
-        try {
-            dataverseRequest = createDataverseRequest(findUserOrDie());
-        } catch (WrappedResponse wr) {
-            List<String> args = Arrays.asList(wr.getLocalizedMessage());
-            return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args));
-        }
+        dataverseRequest = createDataverseRequest(getRequestUser(crc));
 
         if (identifier == null || identifier.equals("")) {
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.requestAccess.noKey"));
@@ -1622,15 +1621,16 @@ public Response revokeFileAccess(@PathParam("id") String fileToRequestAccessId,
      *
      * @author sekmiller
      *
+     * @param crc
      * @param fileToRequestAccessId
      * @param identifier
-     * @param apiToken
      * @param headers
      * @return
      */
     @PUT
+    @AuthRequired
     @Path("/datafile/{id}/rejectAccess/{identifier}")
-    public Response rejectFileAccess(@PathParam("id") String fileToRequestAccessId, @PathParam("identifier") String identifier, @Context HttpHeaders headers) {
+    public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathParam("id") String fileToRequestAccessId, @PathParam("identifier") String identifier, @Context HttpHeaders headers) {
 
         DataverseRequest dataverseRequest;
         DataFile dataFile;
@@ -1649,49 +1649,85 @@ public Response rejectFileAccess(@PathParam("id") String fileToRequestAccessId,
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.grantAccess.noAssigneeFound", args));
         }
 
-        try {
-            dataverseRequest = createDataverseRequest(findUserOrDie());
-        } catch (WrappedResponse wr) {
-            List<String> args = Arrays.asList(identifier);
-            return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.failure.noUser", args));
-        }
-        
+        dataverseRequest = createDataverseRequest(getRequestUser(crc));
+
         if (!(dataverseRequest.getAuthenticatedUser().isSuperuser() || permissionService.requestOn(dataverseRequest, dataFile).has(Permission.ManageFilePermissions))) {
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.rejectAccess.failure.noPermissions"));
         }
-
-        if (dataFile.getFileAccessRequesters().contains(ra)) {
-            dataFile.getFileAccessRequesters().remove(ra);
+        FileAccessRequest far = dataFile.getAccessRequestForAssignee(ra);
+        if (far != null) {
+            far.setStateRejected();
             dataFileService.save(dataFile);
 
             try {
                 AuthenticatedUser au = (AuthenticatedUser) ra;
-                userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.REJECTFILEACCESS, dataFile.getOwner().getId());
+                userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()),
+                        UserNotification.Type.REJECTFILEACCESS, dataFile.getOwner().getId());
             } catch (ClassCastException e) {
-                //nothing to do here - can only send a notification to an authenticated user
+                // nothing to do here - can only send a notification to an authenticated user
             }
 
             List<String> args = Arrays.asList(dataFile.getDisplayName());
             return ok(BundleUtil.getStringFromBundle("access.api.rejectAccess.success.for.single.file", args));
-
         } else {
             List<String> args = Arrays.asList(dataFile.getDisplayName(), ra.getDisplayInfo().getTitle());
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args));
         }
     }
-    
+
+    @GET
+    @AuthRequired
+    @Path("/datafile/{id}/userFileAccessRequested")
+    public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+        DataFile dataFile;
+        AuthenticatedUser requestAuthenticatedUser;
+        try {
+            dataFile = findDataFileOrDie(dataFileId);
+            requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        boolean fileAccessRequested = false;
+        List<FileAccessRequest> requests = dataFile.getFileAccessRequests();
+        for (FileAccessRequest fileAccessRequest : requests) {
+            if (fileAccessRequest.getRequester().getId().equals(requestAuthenticatedUser.getId())) {
+                fileAccessRequested = true;
+                break;
+            }
+        }
+        return ok(fileAccessRequested);
+    }
+
+    @GET
+    @AuthRequired
+    @Path("/datafile/{id}/userPermissions")
+    public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+        DataFile dataFile;
+        try {
+            dataFile = findDataFileOrDie(dataFileId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        User requestUser = getRequestUser(crc);
+        jsonObjectBuilder.add("canDownloadFile", permissionService.userOn(requestUser, dataFile).has(Permission.DownloadFile));
+        jsonObjectBuilder.add("canManageFilePermissions", permissionService.userOn(requestUser, dataFile).has(Permission.ManageFilePermissions));
+        jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset));
+        return ok(jsonObjectBuilder);
+    }
+
     // checkAuthorization is a convenience method; it calls the boolean method
     // isAccessAuthorized(), the actual workhorse, and throws a 403 exception if not.
     
-    private void checkAuthorization(DataFile df) throws WebApplicationException {
+    private void checkAuthorization(User user, DataFile df) throws WebApplicationException {
 
-        if (!isAccessAuthorized(df)) {
+        if (!isAccessAuthorized(user, df)) {
             throw new ForbiddenException();
         }        
     }
     
 
-    private boolean isAccessAuthorized(DataFile df) {
+    private boolean isAccessAuthorized(User requestUser, DataFile df) {
     // First, check if the file belongs to a released Dataset version: 
         
         boolean published = false; 
@@ -1773,34 +1809,26 @@ private boolean isAccessAuthorized(DataFile df) {
         }
         
         //For permissions check decide if we have a session user, or an API user
-        User user = null;
+        User sessionUser = null;
         
         /** 
          * Authentication/authorization:
          */
-        
-        User apiTokenUser = null;
+
+        User apiUser = requestUser;
 
         /*
-         * The logic looks for an apitoken authenticated user and uses it if it exists.
-         * If not, and a session user exists, we use that. If the apitoken method
-         * indicates a GuestUser, we will use that if there's no session.
+         * If the API user is not authenticated and a session user exists, we use the
+         * session user. If the API user is a GuestUser, we will use it when there's no session.
          * 
          * This is currently the only API call that supports sessions. If the rest of
          * the API is opened up, the custom logic here wouldn't be needed.
          */
 
-        try {
-            logger.fine("calling apiTokenUser = findUserOrDie()...");
-            apiTokenUser = findUserOrDie();
-        } catch (WrappedResponse wr) {
-            logger.log(Level.FINE, "Message from findUserOrDie(): {0}", wr.getMessage());
-        }
-        
-        if ((apiTokenUser instanceof GuestUser) && session != null) {
+        if ((apiUser instanceof GuestUser) && session != null) {
             if (session.getUser() != null) {
-                user = session.getUser();
-                apiTokenUser=null;
+                sessionUser = session.getUser();
+                apiUser = null;
                 //Fine logging
                 if (!session.getUser().isAuthenticated()) {
                     logger.fine("User associated with the session is not an authenticated user.");
@@ -1818,7 +1846,7 @@ private boolean isAccessAuthorized(DataFile df) {
             logger.fine("Session is null.");
         } 
         //If we don't have a user, nothing more to do. (Note session could have returned GuestUser)
-        if (user == null && apiTokenUser == null) {
+        if (sessionUser == null && apiUser == null) {
             logger.warning("Unable to find a user via session or with a token.");
             return false;
         }
@@ -1831,8 +1859,8 @@ private boolean isAccessAuthorized(DataFile df) {
          */
 
         DataverseRequest dvr = null;
-        if (apiTokenUser != null) {
-            dvr = createDataverseRequest(apiTokenUser);
+        if (apiUser != null) {
+            dvr = createDataverseRequest(apiUser);
         } else {
             // used in JSF context, user may be Guest
             dvr = dvRequestService.getDataverseRequest();
@@ -1856,42 +1884,35 @@ private boolean isAccessAuthorized(DataFile df) {
                 return true;
             }
         }
-        if (user != null) {
-            logger.log(Level.FINE, "Session-based auth: user {0} has NO access rights on the requested datafile.", user.getIdentifier());
+        if (sessionUser != null) {
+            logger.log(Level.FINE, "Session-based auth: user {0} has NO access rights on the requested datafile.", sessionUser.getIdentifier());
         } 
         
-        if (apiTokenUser != null) {
-            logger.log(Level.FINE, "Token-based auth: user {0} has NO access rights on the requested datafile.", apiTokenUser.getIdentifier());
+        if (apiUser != null) {
+            logger.log(Level.FINE, "Token-based auth: user {0} has NO access rights on the requested datafile.", apiUser.getIdentifier());
         } 
         return false; 
     }   
     
 
         
-    private User findAPITokenUser() {
-        User apiTokenUser = null;
-        try {
-            logger.fine("calling apiTokenUser = findUserOrDie()...");
-            apiTokenUser = findUserOrDie();
-            /*
-             * The idea here is to not let a guest user returned from findUserOrDie (which
-             * happens when there is no key/token, and which we want if there's no session)
-             * from overriding an authenticated session user.
-             */
-            if(apiTokenUser instanceof GuestUser) {
-                if(session!=null && session.getUser()!=null) {
-                //The apiTokenUser, if set, will override the sessionUser in permissions calcs, so set it to null if we have a session user
-                apiTokenUser=null;
-                }
+    private User findAPITokenUser(User requestUser) {
+        User apiTokenUser = requestUser;
+        /*
+         * The idea here is to not let a guest user coming from the request (which
+         * happens when there is no key/token, and which we want if there's no session)
+         * override an authenticated session user.
+         */
+        if(apiTokenUser instanceof GuestUser) {
+            if(session!=null && session.getUser()!=null) {
+            //The apiTokenUser, if set, will override the sessionUser in permissions calcs, so set it to null if we have a session user
+            apiTokenUser=null;
             }
-            return apiTokenUser;
-        } catch (WrappedResponse wr) {
-            logger.log(Level.FINE, "Message from findUserOrDie(): {0}", wr.getMessage());
-            return null;
         }
+        return apiTokenUser;
     }
 
-    private URI handleCustomZipDownload(String customZipServiceUrl, String fileIds, User apiTokenUser, UriInfo uriInfo, HttpHeaders headers, boolean donotwriteGBResponse, boolean orig) throws WebApplicationException {
+    private URI handleCustomZipDownload(User user, String customZipServiceUrl, String fileIds, User apiTokenUser, UriInfo uriInfo, HttpHeaders headers, boolean donotwriteGBResponse, boolean orig) throws WebApplicationException {
         
         String zipServiceKey = null; 
         Timestamp timestamp = null; 
@@ -1917,7 +1938,7 @@ private URI handleCustomZipDownload(String customZipServiceUrl, String fileIds,
                 DataFile file = dataFileService.find(fileId);
                 if (file != null) {
                     validFileCount++;
-                    if (isAccessAuthorized(file)) {
+                    if (isAccessAuthorized(user, file)) {
                         logger.fine("adding datafile (id=" + file.getId() + ") to the download list of the ZippedDownloadInstance.");
                         if (donotwriteGBResponse != true && file.isReleased()) {
                             GuestbookResponse gbr = guestbookResponseService.initAPIGuestbookResponse(file.getOwner(), file, session, apiTokenUser);
@@ -1961,5 +1982,5 @@ private URI handleCustomZipDownload(String customZipServiceUrl, String fileIds,
             throw new BadRequestException(); 
         }
         return redirectUri;
-    }   
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 2c147b94243..48f9e19d835 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -14,10 +14,11 @@
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DataverseSession;
 import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.validation.EMailValidator;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
-import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.Template;
 import edu.harvard.iq.dataverse.TemplateServiceBean;
 import edu.harvard.iq.dataverse.UserServiceBean;
@@ -46,17 +47,19 @@
 import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
 import edu.harvard.iq.dataverse.settings.Setting;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 
 import java.io.InputStream;
@@ -65,14 +68,14 @@
 import java.util.Map.Entry;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Response.Status;
 
 import org.apache.commons.io.IOUtils;
 
@@ -93,7 +96,6 @@
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteTemplateCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RegisterDvObjectCommand;
-import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.userdata.UserListMaker;
@@ -102,8 +104,10 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 
+import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 
@@ -113,12 +117,13 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
-import javax.inject.Inject;
-import javax.json.JsonArray;
-import javax.persistence.Query;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.StreamingOutput;
+import jakarta.inject.Inject;
+import jakarta.json.JsonArray;
+import jakarta.persistence.Query;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.StreamingOutput;
+import java.nio.file.Paths;
 
 /**
  * Where the secure, setup API calls live.
@@ -516,10 +521,11 @@ public Response publishDataverseAsCreator(@PathParam("id") long id) {
 
 	@Deprecated
 	@GET
+	@AuthRequired
 	@Path("authenticatedUsers")
-	public Response listAuthenticatedUsers() {
+	public Response listAuthenticatedUsers(@Context ContainerRequestContext crc) {
 		try {
-			AuthenticatedUser user = findAuthenticatedUserOrDie();
+			AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
 			if (!user.isSuperuser()) {
 				return error(Response.Status.FORBIDDEN, "Superusers only.");
 			}
@@ -534,22 +540,18 @@ public Response listAuthenticatedUsers() {
 	}
 
 	@GET
+	@AuthRequired
 	@Path(listUsersPartialAPIPath)
 	@Produces({ "application/json" })
 	public Response filterAuthenticatedUsers(
+			@Context ContainerRequestContext crc,
 			@QueryParam("searchTerm") String searchTerm,
 			@QueryParam("selectedPage") Integer selectedPage,
 			@QueryParam("itemsPerPage") Integer itemsPerPage,
 			@QueryParam("sortKey") String sortKey
 	) {
 
-		User authUser;
-		try {
-			authUser = this.findUserOrDie();
-		} catch (AbstractApiBean.WrappedResponse ex) {
-			return error(Response.Status.FORBIDDEN,
-					BundleUtil.getStringFromBundle("dashboard.list_users.api.auth.invalid_apikey"));
-		}
+		User authUser = getRequestUser(crc);
 
 		if (!authUser.isSuperuser()) {
 			return error(Response.Status.FORBIDDEN,
@@ -602,11 +604,12 @@ public Response createAuthenicatedUser(JsonObject jsonObject) {
 	 *             Shib-specfic one.
 	 */
 	@PUT
+	@AuthRequired
 	@Path("authenticatedUsers/id/{id}/convertShibToBuiltIn")
 	@Deprecated
-	public Response convertShibUserToBuiltin(@PathParam("id") Long id, String newEmailAddress) {
-                try {
-                        AuthenticatedUser user = findAuthenticatedUserOrDie();
+	public Response convertShibUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) {
+		try {
+			AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
 			if (!user.isSuperuser()) {
 				return error(Response.Status.FORBIDDEN, "Superusers only.");
 			}
@@ -639,10 +642,11 @@ public Response convertShibUserToBuiltin(@PathParam("id") Long id, String newEma
 	}
 
 	@PUT
+	@AuthRequired
 	@Path("authenticatedUsers/id/{id}/convertRemoteToBuiltIn")
-	public Response convertOAuthUserToBuiltin(@PathParam("id") Long id, String newEmailAddress) {
-                try {
-			AuthenticatedUser user = findAuthenticatedUserOrDie();
+	public Response convertOAuthUserToBuiltin(@Context ContainerRequestContext crc, @PathParam("id") Long id, String newEmailAddress) {
+		try {
+			AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
 			if (!user.isSuperuser()) {
 				return error(Response.Status.FORBIDDEN, "Superusers only.");
 			}
@@ -679,12 +683,13 @@ public Response convertOAuthUserToBuiltin(@PathParam("id") Long id, String newEm
 	 * This is used in testing via AdminIT.java but we don't expect sysadmins to use
 	 * this.
 	 */
-	@Path("authenticatedUsers/convert/builtin2shib")
 	@PUT
-	public Response builtin2shib(String content) {
+	@AuthRequired
+	@Path("authenticatedUsers/convert/builtin2shib")
+	public Response builtin2shib(@Context ContainerRequestContext crc, String content) {
 		logger.info("entering builtin2shib...");
 		try {
-			AuthenticatedUser userToRunThisMethod = findAuthenticatedUserOrDie();
+			AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc);
 			if (!userToRunThisMethod.isSuperuser()) {
 				return error(Response.Status.FORBIDDEN, "Superusers only.");
 			}
@@ -829,12 +834,13 @@ public Response builtin2shib(String content) {
 	 * This is used in testing via AdminIT.java but we don't expect sysadmins to use
 	 * this.
 	 */
-	@Path("authenticatedUsers/convert/builtin2oauth")
 	@PUT
-	public Response builtin2oauth(String content) {
+	@AuthRequired
+	@Path("authenticatedUsers/convert/builtin2oauth")
+	public Response builtin2oauth(@Context ContainerRequestContext crc, String content) {
 		logger.info("entering builtin2oauth...");
 		try {
-			AuthenticatedUser userToRunThisMethod = findAuthenticatedUserOrDie();
+			AuthenticatedUser userToRunThisMethod = getRequestAuthenticatedUserOrDie(crc);
 			if (!userToRunThisMethod.isSuperuser()) {
 				return error(Response.Status.FORBIDDEN, "Superusers only.");
 			}
@@ -1009,14 +1015,15 @@ public Response listBuiltinRoles() {
 	}
 
     @DELETE
+	@AuthRequired
     @Path("roles/{id}")
-    public Response deleteRole(@PathParam("id") String id) {
+    public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id") String id) {
 
         return response(req -> {
             DataverseRole doomed = findRoleOrDie(id);
             execCommand(new DeleteRoleCommand(req, doomed));
             return ok("role " + doomed.getName() + " deleted.");
-        });
+        }, getRequestUser(crc));
     }
 
 	@Path("superuser/{identifier}")
@@ -1323,23 +1330,20 @@ public Response convertUserFromBcryptToSha1(String json) {
 	}
 
 	@Path("permissions/{dvo}")
+	@AuthRequired
 	@GET
-	public Response findPermissonsOn(@PathParam("dvo") String dvo) {
+	public Response findPermissonsOn(@Context ContainerRequestContext crc, @PathParam("dvo") String dvo) {
 		try {
 			DvObject dvObj = findDvo(dvo);
 			if (dvObj == null) {
 				return notFound("DvObject " + dvo + " not found");
 			}
-			try {
-				User aUser = findUserOrDie();
-				JsonObjectBuilder bld = Json.createObjectBuilder();
-				bld.add("user", aUser.getIdentifier());
-				bld.add("permissions", json(permissionSvc.permissionsFor(createDataverseRequest(aUser), dvObj)));
-				return ok(bld);
-
-			} catch (WrappedResponse wr) {
-				return wr.getResponse();
-			}
+			User aUser = getRequestUser(crc);
+			JsonObjectBuilder bld = Json.createObjectBuilder();
+			bld.add("user", aUser.getIdentifier());
+			bld.add("permissions", json(permissionSvc.permissionsFor(createDataverseRequest(aUser), dvObj)));
+			return ok(bld);
+
 		} catch (Exception e) {
 			logger.log(Level.SEVERE, "Error while testing permissions", e);
 			return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage());
@@ -1373,7 +1377,7 @@ public Response fixMissingOriginalTypes() {
 					"All the tabular files in the database already have the original types set correctly; exiting.");
 		} else {
 			for (Long fileid : affectedFileIds) {
-				logger.info("found file id: " + fileid);
+				logger.fine("found file id: " + fileid);
 			}
 			info.add("message", "Found " + affectedFileIds.size()
 					+ " tabular files with missing original types. Kicking off an async job that will repair the files in the background.");
@@ -1465,16 +1469,17 @@ public Response isOrcidEnabled() {
 	}
 
     @POST
+	@AuthRequired
     @Path("{id}/reregisterHDLToPID")
-    public Response reregisterHdlToPID(@PathParam("id") String id) {
+    public Response reregisterHdlToPID(@Context ContainerRequestContext crc, @PathParam("id") String id) {
         logger.info("Starting to reregister  " + id + " Dataset Id. (from hdl to doi)" + new Date());
         try {
-            if (settingsSvc.get(SettingsServiceBean.Key.Protocol.toString()).equals(GlobalId.HDL_PROTOCOL)) {
+            if (settingsSvc.get(SettingsServiceBean.Key.Protocol.toString()).equals(HandlenetServiceBean.HDL_PROTOCOL)) {
                 logger.info("Bad Request protocol set to handle  " );
                 return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("admin.api.migrateHDL.failure.must.be.set.for.doi"));
             }
             
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if (!u.isSuperuser()) {
                 logger.info("Bad Request Unauthor " );
                 return error(Status.UNAUTHORIZED, BundleUtil.getStringFromBundle("admin.api.auth.mustBeSuperUser"));
@@ -1482,7 +1487,7 @@ public Response reregisterHdlToPID(@PathParam("id") String id) {
 
             DataverseRequest r = createDataverseRequest(u);
             Dataset ds = findDatasetOrDie(id);
-            if (ds.getIdentifier() != null && !ds.getIdentifier().isEmpty() && ds.getProtocol().equals(GlobalId.HDL_PROTOCOL)) {
+            if (ds.getIdentifier() != null && !ds.getIdentifier().isEmpty() && ds.getProtocol().equals(HandlenetServiceBean.HDL_PROTOCOL)) {
                 execCommand(new RegisterDvObjectCommand(r, ds, true));
             } else {
                 return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("admin.api.migrateHDL.failure.must.be.hdl.dataset"));
@@ -1500,192 +1505,305 @@ public Response reregisterHdlToPID(@PathParam("id") String id) {
         return ok(BundleUtil.getStringFromBundle("admin.api.migrateHDL.success"));
     }
 
-	@GET
-	@Path("{id}/registerDataFile")
-	public Response registerDataFile(@PathParam("id") String id) {
-		logger.info("Starting to register  " + id + " file id. " + new Date());
+    @GET
+    @AuthRequired
+    @Path("{id}/registerDataFile")
+    public Response registerDataFile(@Context ContainerRequestContext crc, @PathParam("id") String id) {
+        logger.info("Starting to register  " + id + " file id. " + new Date());
 
-		try {
-			User u = findUserOrDie();
-			DataverseRequest r = createDataverseRequest(u);
-			DataFile df = findDataFileOrDie(id);
-			if (df.getIdentifier() == null || df.getIdentifier().isEmpty()) {
-				execCommand(new RegisterDvObjectCommand(r, df));
-			} else {
-				return ok("File was already registered. ");
-			}
+        try {
+            User u = getRequestUser(crc);
+            DataverseRequest r = createDataverseRequest(u);
+            DataFile df = findDataFileOrDie(id);
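+            // A DataFile's owner is its Dataset, and the Dataset's owner is the collection whose file-PID setting is checked below.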
+            if (!systemConfig.isFilePIDsEnabledForCollection(df.getOwner().getOwner())) {
+                return forbidden("PIDs are not enabled for this file's collection.");
+            }
+            if (df.getIdentifier() == null || df.getIdentifier().isEmpty()) {
+                execCommand(new RegisterDvObjectCommand(r, df));
+            } else {
+                return ok("File was already registered. ");
+            }
 
-		} catch (WrappedResponse r) {
-			logger.info("Failed to register file id: " + id);
-		} catch (Exception e) {
-			logger.info("Failed to register file id: " + id + " Unexpecgted Exception " + e.getMessage());
-		}
-		return ok("Datafile registration complete. File registered successfully.");
-	}
+        } catch (WrappedResponse r) {
+            logger.info("Failed to register file id: " + id);
+        } catch (Exception e) {
+            logger.info("Failed to register file id: " + id + " Unexpecgted Exception " + e.getMessage());
+        }
+        return ok("Datafile registration complete. File registered successfully.");
+    }
 
-	@GET
-	@Path("/registerDataFileAll")
-	public Response registerDataFileAll() {
-		Integer count = fileService.findAll().size();
-		Integer successes = 0;
-		Integer alreadyRegistered = 0;
-		Integer released = 0;
-		Integer draft = 0;
-		logger.info("Starting to register: analyzing " + count + " files. " + new Date());
-		logger.info("Only unregistered, published files will be registered.");
-		for (DataFile df : fileService.findAll()) {
-			try {
-				if ((df.getIdentifier() == null || df.getIdentifier().isEmpty())) {
-					if (df.isReleased()) {
-						released++;
-						User u = findAuthenticatedUserOrDie();
-						DataverseRequest r = createDataverseRequest(u);
-						execCommand(new RegisterDvObjectCommand(r, df));
-						successes++;
-						if (successes % 100 == 0) {
-							logger.info(successes + " of  " + count + " files registered successfully. " + new Date());
-						}
-					} else {
-						draft++;
-						logger.info(draft + " of  " + count + " files not yet published");
-					}
-				} else {
-					alreadyRegistered++;
-					logger.info(alreadyRegistered + " of  " + count + " files are already registered. " + new Date());
-				}
-			} catch (WrappedResponse ex) {
-				released++;
-				logger.info("Failed to register file id: " + df.getId());
-				Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
-			} catch (Exception e) {
-				logger.info("Unexpected Exception: " + e.getMessage());
-			}
-		}
-		logger.info("Final Results:");
-		logger.info(alreadyRegistered + " of  " + count + " files were already registered. " + new Date());
-		logger.info(draft + " of  " + count + " files are not yet published. " + new Date());
-		logger.info(released + " of  " + count + " unregistered, published files to register. " + new Date());
-		logger.info(successes + " of  " + released + " unregistered, published files registered successfully. "
-				+ new Date());
-
-		return ok("Datafile registration complete." + successes + " of  " + released
-				+ " unregistered, published files registered successfully.");
-	}
+    @GET
+    @AuthRequired
+    @Path("/registerDataFileAll")
+    public Response registerDataFileAll(@Context ContainerRequestContext crc) {
+        Integer count = fileService.findAll().size();
+        Integer successes = 0;
+        Integer alreadyRegistered = 0;
+        Integer released = 0;
+        Integer draft = 0;
+        Integer skipped = 0;
+        logger.info("Starting to register: analyzing " + count + " files. " + new Date());
+        logger.info("Only unregistered, published files will be registered.");
+        User u = null;
+        try {
+            u = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse e1) {
+            return error(Status.UNAUTHORIZED, "api key required");
+        }
+        DataverseRequest r = createDataverseRequest(u);
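+        // Scan every DataFile: skip files in collections where file PIDs are disabled, register unregistered released files, and count drafts and already-registered files.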
+        for (DataFile df : fileService.findAll()) {
+            try {
+                if ((df.getIdentifier() == null || df.getIdentifier().isEmpty())) {
+                    if (!systemConfig.isFilePIDsEnabledForCollection(df.getOwner().getOwner())) {
+                        skipped++;
+                        if (skipped % 100 == 0) {
+                            logger.info(skipped + " of  " + count + " files not in collections that allow file PIDs. " + new Date());
+                        }
+                    } else if (df.isReleased()) {
+                        released++;
+                        execCommand(new RegisterDvObjectCommand(r, df));
+                        successes++;
+                        if (successes % 100 == 0) {
+                            logger.info(successes + " of  " + count + " files registered successfully. " + new Date());
+                        }
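+                        // Pause for one second between registrations to throttle calls to the PID service.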
+                        try {
+                            Thread.sleep(1000);
+                        } catch (InterruptedException ie) {
+                            logger.warning("Interrupted Exception when attempting to execute Thread.sleep()!");
+                        }
+                    } else {
+                        draft++;
+                        if (draft % 100 == 0) {
+                            logger.info(draft + " of  " + count + " files not yet published");
+                        }
+                    }
+                } else {
+                    alreadyRegistered++;
+                    if (alreadyRegistered % 100 == 0) {
+                        logger.info(alreadyRegistered + " of  " + count + " files are already registered. " + new Date());
+                    }
+                }
+            } catch (WrappedResponse ex) {
+                logger.info("Failed to register file id: " + df.getId());
+                Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
+            } catch (Exception e) {
+                logger.info("Unexpected Exception: " + e.getMessage());
+            }
+            
 
-	@GET
-	@Path("/updateHashValues/{alg}")
-	public Response updateHashValues(@PathParam("alg") String alg, @QueryParam("num") int num) {
-		Integer count = fileService.findAll().size();
-		Integer successes = 0;
-		Integer alreadyUpdated = 0;
-		Integer rehashed = 0;
-		Integer harvested=0;
-		
-		if (num <= 0)
-			num = Integer.MAX_VALUE;
-		DataFile.ChecksumType cType = null;
-		try {
-			cType = DataFile.ChecksumType.fromString(alg);
-		} catch (IllegalArgumentException iae) {
-			return error(Status.BAD_REQUEST, "Unknown algorithm");
-		}
-		logger.info("Starting to rehash: analyzing " + count + " files. " + new Date());
-		logger.info("Hashes not created with " + alg + " will be verified, and, if valid, replaced with a hash using "
-				+ alg);
-		try {
-			User u = findAuthenticatedUserOrDie();
-			if (!u.isSuperuser())
-				return error(Status.UNAUTHORIZED, "must be superuser");
-		} catch (WrappedResponse e1) {
-			return error(Status.UNAUTHORIZED, "api key required");
-		}
+        }
+        logger.info("Final Results:");
+        logger.info(alreadyRegistered + " of  " + count + " files were already registered. " + new Date());
+        logger.info(draft + " of  " + count + " files are not yet published. " + new Date());
+        logger.info(released + " of  " + count + " unregistered, published files to register. " + new Date());
+        logger.info(successes + " of  " + released + " unregistered, published files registered successfully. "
+                + new Date());
+        logger.info(skipped + " of  " + count + " files not in collections that allow file PIDs. " + new Date());
+
+        return ok("Datafile registration complete." + successes + " of  " + released
+                + " unregistered, published files registered successfully.");
+    }
+    
+    @GET
+    @AuthRequired
+    @Path("/registerDataFiles/{alias}")
+    public Response registerDataFilesInCollection(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("sleep") Integer sleepInterval) {
+        Dataverse collection;
+        try {
+            collection = findDataverseOrDie(alias);
+        } catch (WrappedResponse r) {
+            return r.getResponse();
+        }
+        
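+        // The registration commands below are issued as a superuser account looked up from the database, not as the calling user.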
+        AuthenticatedUser superuser = authSvc.getAdminUser();
+        if (superuser == null) {
+            return error(Response.Status.INTERNAL_SERVER_ERROR, "Cannot find the superuser to execute /admin/registerDataFiles.");
+        }
+        
+        if (!systemConfig.isFilePIDsEnabledForCollection(collection)) {
+            return ok("Registration of file-level pid is disabled in collection "+alias+"; nothing to do");
+        }
+        
+        List<DataFile> dataFiles = fileService.findByDirectCollectionOwner(collection.getId());
+        Integer count = dataFiles.size();
+        Integer countSuccesses = 0;
+        Integer countAlreadyRegistered = 0;
+        Integer countReleased = 0;
+        Integer countDrafts = 0;
+        
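+        // The optional "sleep" query parameter sets the pause (in seconds) between registrations; it defaults to 1 and must be positive.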
+        if (sleepInterval == null) {
+            sleepInterval = 1; 
+        } else if (sleepInterval.intValue() < 1) {
+            return error(Response.Status.BAD_REQUEST, "Invalid sleep interval: "+sleepInterval);
+        }
+        
+        logger.info("Starting to register: analyzing " + count + " files. " + new Date());
+        logger.info("Only unregistered, published files will be registered.");
+
+        for (DataFile df : dataFiles) {
+            try {
+                if ((df.getIdentifier() == null || df.getIdentifier().isEmpty())) {
+                    if (df.isReleased()) {
+                        countReleased++;
+                        DataverseRequest r = createDataverseRequest(superuser);
+                        execCommand(new RegisterDvObjectCommand(r, df));
+                        countSuccesses++;
+                        if (countSuccesses % 100 == 0) {
+                            logger.info(countSuccesses + " out of " + count + " files registered successfully. " + new Date());
+                        }
+                        try {
+                            Thread.sleep(sleepInterval * 1000);
+                        } catch (InterruptedException ie) {
+                            logger.warning("Interrupted Exception when attempting to execute Thread.sleep()!");
+                        }
+                    } else {
+                        countDrafts++;
+                        logger.fine(countDrafts + " out of " + count + " files not yet published");
+                    }
+                } else {
+                    countAlreadyRegistered++;
+                    logger.fine(countAlreadyRegistered + " out of " + count + " files are already registered. " + new Date());
+                }
+            } catch (WrappedResponse ex) {
+                countReleased++;
+                logger.info("Failed to register file id: " + df.getId());
+                Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
+            } catch (Exception e) {
+                logger.info("Unexpected Exception: " + e.getMessage());
+            }
+        }
+        
+        logger.info(countAlreadyRegistered + " out of " + count + " files were already registered. " + new Date());
+        logger.info(countDrafts + " out of " + count + " files are not yet published. " + new Date());
+        logger.info(countReleased + " out of " + count + " unregistered, published files to register. " + new Date());
+        logger.info(countSuccesses + " out of " + countReleased + " unregistered, published files registered successfully. "
+                + new Date());
+
+        return ok("Datafile registration complete. " + countSuccesses + " out of " + countReleased
+                + " unregistered, published files registered successfully.");
+    }
 
-		for (DataFile df : fileService.findAll()) {
-			if (rehashed.intValue() >= num)
-				break;
-			InputStream in = null;
-			InputStream in2 = null; 
-			try {
-				if (df.isHarvested()) {
-					harvested++;
-				} else {
-					if (!df.getChecksumType().equals(cType)) {
-
-						rehashed++;
-						logger.fine(rehashed + ": Datafile: " + df.getFileMetadata().getLabel() + ", "
-								+ df.getIdentifier());
-						// verify hash and calc new one to replace it
-						StorageIO<DataFile> storage = df.getStorageIO();
-						storage.open(DataAccessOption.READ_ACCESS);
-						if (!df.isTabularData()) {
-							in = storage.getInputStream();
-						} else {
-							// if this is a tabular file, read the preserved original "auxiliary file"
-							// instead:
-							in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
-						}
-						if (in == null)
-							logger.warning("Cannot retrieve file.");
-						String currentChecksum = FileUtil.calculateChecksum(in, df.getChecksumType());
-						if (currentChecksum.equals(df.getChecksumValue())) {
-							logger.fine("Current checksum for datafile: " + df.getFileMetadata().getLabel() + ", "
-									+ df.getIdentifier() + " is valid");
-							storage.open(DataAccessOption.READ_ACCESS);
-							if (!df.isTabularData()) {
-								in2 = storage.getInputStream();
-							} else {
-								// if this is a tabular file, read the preserved original "auxiliary file"
-								// instead:
-								in2 = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
-							}
-							if (in2 == null)
-								logger.warning("Cannot retrieve file to calculate new checksum.");
-							String newChecksum = FileUtil.calculateChecksum(in2, cType);
-
-							df.setChecksumType(cType);
-							df.setChecksumValue(newChecksum);
-							successes++;
-							if (successes % 100 == 0) {
-								logger.info(
-										successes + " of  " + count + " files rehashed successfully. " + new Date());
-							}
-						} else {
-							logger.warning("Problem: Current checksum for datafile: " + df.getFileMetadata().getLabel()
-									+ ", " + df.getIdentifier() + " is INVALID");
-						}
-					} else {
-						alreadyUpdated++;
-						if (alreadyUpdated % 100 == 0) {
-							logger.info(alreadyUpdated + " of  " + count
-									+ " files are already have hashes with the new algorithm. " + new Date());
-						}
-					}
-				}
-			} catch (Exception e) {
-				logger.warning("Unexpected Exception: " + e.getMessage());
+    @GET
+    @AuthRequired
+    @Path("/updateHashValues/{alg}")
+    public Response updateHashValues(@Context ContainerRequestContext crc, @PathParam("alg") String alg, @QueryParam("num") int num) {
+        Integer count = fileService.findAll().size();
+        Integer successes = 0;
+        Integer alreadyUpdated = 0;
+        Integer rehashed = 0;
+        Integer harvested = 0;
+
+        if (num <= 0)
+            num = Integer.MAX_VALUE;
+        DataFile.ChecksumType cType = null;
+        try {
+            cType = DataFile.ChecksumType.fromString(alg);
+        } catch (IllegalArgumentException iae) {
+            return error(Status.BAD_REQUEST, "Unknown algorithm");
+        }
+        logger.info("Starting to rehash: analyzing " + count + " files. " + new Date());
+        logger.info("Hashes not created with " + alg + " will be verified, and, if valid, replaced with a hash using "
+                + alg);
+        try {
+            User u = getRequestAuthenticatedUserOrDie(crc);
+            if (!u.isSuperuser())
+                return error(Status.UNAUTHORIZED, "must be superuser");
+        } catch (WrappedResponse e1) {
+            return error(Status.UNAUTHORIZED, "api key required");
+        }
 
-			} finally {
-				IOUtils.closeQuietly(in);
-				IOUtils.closeQuietly(in2);
-			}
-		}
-		logger.info("Final Results:");
-		logger.info(harvested + " harvested files skipped.");
-		logger.info(
-				alreadyUpdated + " of  " + count + " files already had hashes with the new algorithm. " + new Date());
-		logger.info(rehashed + " of  " + count + " files to rehash. " + new Date());
-		logger.info(
-				successes + " of  " + rehashed + " files successfully rehashed with the new algorithm. " + new Date());
-
-		return ok("Datafile rehashing complete." + successes + " of  " + rehashed + " files successfully rehashed.");
-	}
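+        // For each non-harvested file whose checksum was not created with the requested algorithm: verify the stored checksum, then recompute and store it using the new algorithm.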
+        for (DataFile df : fileService.findAll()) {
+            if (rehashed.intValue() >= num)
+                break;
+            InputStream in = null;
+            InputStream in2 = null;
+            try {
+                if (df.isHarvested()) {
+                    harvested++;
+                } else {
+                    if (!df.getChecksumType().equals(cType)) {
+
+                        rehashed++;
+                        logger.fine(rehashed + ": Datafile: " + df.getFileMetadata().getLabel() + ", "
+                                + df.getIdentifier());
+                        // verify hash and calc new one to replace it
+                        StorageIO<DataFile> storage = df.getStorageIO();
+                        storage.open(DataAccessOption.READ_ACCESS);
+                        if (!df.isTabularData()) {
+                            in = storage.getInputStream();
+                        } else {
+                            // if this is a tabular file, read the preserved original "auxiliary file"
+                            // instead:
+                            in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
+                        }
+                        if (in == null)
+                            logger.warning("Cannot retrieve file.");
+                        String currentChecksum = FileUtil.calculateChecksum(in, df.getChecksumType());
+                        if (currentChecksum.equals(df.getChecksumValue())) {
+                            logger.fine("Current checksum for datafile: " + df.getFileMetadata().getLabel() + ", "
+                                    + df.getIdentifier() + " is valid");
+                            // Need to reset so we don't get the same stream (StorageIO class inputstreams
+                            // are normally only used once)
+                            storage.setInputStream(null);
+                            storage.open(DataAccessOption.READ_ACCESS);
+                            if (!df.isTabularData()) {
+                                in2 = storage.getInputStream();
+                            } else {
+                                // if this is a tabular file, read the preserved original "auxiliary file"
+                                // instead:
+                                in2 = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
+                            }
+                            if (in2 == null)
+                                logger.warning("Cannot retrieve file to calculate new checksum.");
+                            String newChecksum = FileUtil.calculateChecksum(in2, cType);
+
+                            df.setChecksumType(cType);
+                            df.setChecksumValue(newChecksum);
+                            successes++;
+                            if (successes % 100 == 0) {
+                                logger.info(
+                                        successes + " of  " + count + " files rehashed successfully. " + new Date());
+                            }
+                        } else {
+                            logger.warning("Problem: Current checksum for datafile: " + df.getFileMetadata().getLabel()
+                                    + ", " + df.getIdentifier() + " is INVALID");
+                        }
+                    } else {
+                        alreadyUpdated++;
+                        if (alreadyUpdated % 100 == 0) {
+                            logger.info(alreadyUpdated + " of  " + count
+                                    + " files already have hashes with the new algorithm. " + new Date());
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                logger.warning("Unexpected Exception: " + e.getMessage());
+
+            } finally {
+                IOUtils.closeQuietly(in);
+                IOUtils.closeQuietly(in2);
+            }
+        }
+        logger.info("Final Results:");
+        logger.info(harvested + " harvested files skipped.");
+        logger.info(
+                alreadyUpdated + " of  " + count + " files already had hashes with the new algorithm. " + new Date());
+        logger.info(rehashed + " of  " + count + " files to rehash. " + new Date());
+        logger.info(
+                successes + " of  " + rehashed + " files successfully rehashed with the new algorithm. " + new Date());
+
+        return ok("Datafile rehashing complete." + successes + " of  " + rehashed + " files successfully rehashed.");
+    }
         
     @POST
+    @AuthRequired
     @Path("/computeDataFileHashValue/{fileId}/algorithm/{alg}")
-    public Response computeDataFileHashValue(@PathParam("fileId") String fileId, @PathParam("alg") String alg) {
+    public Response computeDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId, @PathParam("alg") String alg) {
 
         try {
-            User u = findAuthenticatedUserOrDie();
+            User u = getRequestAuthenticatedUserOrDie(crc);
             if (!u.isSuperuser()) {
                 return error(Status.UNAUTHORIZED, "must be superuser");
             }
@@ -1742,11 +1860,12 @@ public Response computeDataFileHashValue(@PathParam("fileId") String fileId, @Pa
     }
     
     @POST
+    @AuthRequired
     @Path("/validateDataFileHashValue/{fileId}")
-    public Response validateDataFileHashValue(@PathParam("fileId") String fileId) {
+    public Response validateDataFileHashValue(@Context ContainerRequestContext crc, @PathParam("fileId") String fileId) {
 
         try {
-            User u = findAuthenticatedUserOrDie();
+            User u = getRequestAuthenticatedUserOrDie(crc);
             if (!u.isSuperuser()) {
                 return error(Status.UNAUTHORIZED, "must be superuser");
             }
@@ -1808,12 +1927,13 @@ public Response validateDataFileHashValue(@PathParam("fileId") String fileId) {
     }
 
     @POST
+    @AuthRequired
     @Path("/submitDatasetVersionToArchive/{id}/{version}")
-    public Response submitDatasetVersionToArchive(@PathParam("id") String dsid,
+    public Response submitDatasetVersionToArchive(@Context ContainerRequestContext crc, @PathParam("id") String dsid,
             @PathParam("version") String versionNumber) {
 
         try {
-            AuthenticatedUser au = findAuthenticatedUserOrDie();
+            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
 
             Dataset ds = findDatasetOrDie(dsid);
 
@@ -1880,11 +2000,12 @@ public void run() {
      * @return
      */
     @POST
+    @AuthRequired
     @Path("/archiveAllUnarchivedDatasetVersions")
-    public Response archiveAllUnarchivedDatasetVersions(@QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) {
+    public Response archiveAllUnarchivedDatasetVersions(@Context ContainerRequestContext crc, @QueryParam("listonly") boolean listonly, @QueryParam("limit") Integer limit, @QueryParam("latestonly") boolean latestonly) {
 
         try {
-            AuthenticatedUser au = findAuthenticatedUserOrDie();
+            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
 
             List<DatasetVersion> dsl = datasetversionService.getUnarchivedDatasetVersions();
             if (dsl != null) {
@@ -1961,31 +2082,32 @@ public void run() {
         }
     }
     
-	@DELETE
-	@Path("/clearMetricsCache")
-	public Response clearMetricsCache() {
-		em.createNativeQuery("DELETE FROM metric").executeUpdate();
-		return ok("all metric caches cleared.");
-	}
+    @DELETE
+    @Path("/clearMetricsCache")
+    public Response clearMetricsCache() {
+        em.createNativeQuery("DELETE FROM metric").executeUpdate();
+        return ok("all metric caches cleared.");
+    }
 
-	@DELETE
-	@Path("/clearMetricsCache/{name}")
-	public Response clearMetricsCacheByName(@PathParam("name") String name) {
-		Query deleteQuery = em.createNativeQuery("DELETE FROM metric where metricname = ?");
-		deleteQuery.setParameter(1, name);
-		deleteQuery.executeUpdate();
-		return ok("metric cache " + name + " cleared.");
-	}
+    @DELETE
+    @Path("/clearMetricsCache/{name}")
+    public Response clearMetricsCacheByName(@PathParam("name") String name) {
+        Query deleteQuery = em.createNativeQuery("DELETE FROM metric where name = ?");
+        deleteQuery.setParameter(1, name);
+        deleteQuery.executeUpdate();
+        return ok("metric cache " + name + " cleared.");
+    }
 
     @GET
+    @AuthRequired
     @Path("/dataverse/{alias}/addRoleAssignmentsToChildren")
-    public Response addRoleAssignementsToChildren(@PathParam("alias") String alias) throws WrappedResponse {
+    public Response addRoleAssignementsToChildren(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
         Dataverse owner = dataverseSvc.findByAlias(alias);
         if (owner == null) {
             return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
         }
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -2008,14 +2130,15 @@ public Response addRoleAssignementsToChildren(@PathParam("alias") String alias)
     }
     
     @GET
+    @AuthRequired
     @Path("/dataverse/{alias}/storageDriver")
-    public Response getStorageDriver(@PathParam("alias") String alias) throws WrappedResponse {
+    public Response getStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
     	Dataverse dataverse = dataverseSvc.findByAlias(alias);
     	if (dataverse == null) {
     		return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
     	}
     	try {
-    		AuthenticatedUser user = findAuthenticatedUserOrDie();
+    		AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
     		if (!user.isSuperuser()) {
     			return error(Response.Status.FORBIDDEN, "Superusers only.");
     		}
@@ -2027,14 +2150,15 @@ public Response getStorageDriver(@PathParam("alias") String alias) throws Wrappe
     }
     
     @PUT
+    @AuthRequired
     @Path("/dataverse/{alias}/storageDriver")
-    public Response setStorageDriver(@PathParam("alias") String alias, String label) throws WrappedResponse {
+    public Response setStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias, String label) throws WrappedResponse {
     	Dataverse dataverse = dataverseSvc.findByAlias(alias);
     	if (dataverse == null) {
     		return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
     	}
     	try {
-    		AuthenticatedUser user = findAuthenticatedUserOrDie();
+    		AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
     		if (!user.isSuperuser()) {
     			return error(Response.Status.FORBIDDEN, "Superusers only.");
     		}
@@ -2052,14 +2176,15 @@ public Response setStorageDriver(@PathParam("alias") String alias, String label)
     }
 
     @DELETE
+    @AuthRequired
     @Path("/dataverse/{alias}/storageDriver")
-    public Response resetStorageDriver(@PathParam("alias") String alias) throws WrappedResponse {
+    public Response resetStorageDriver(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
     	Dataverse dataverse = dataverseSvc.findByAlias(alias);
     	if (dataverse == null) {
     		return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
     	}
     	try {
-    		AuthenticatedUser user = findAuthenticatedUserOrDie();
+    		AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
     		if (!user.isSuperuser()) {
     			return error(Response.Status.FORBIDDEN, "Superusers only.");
     		}
@@ -2071,10 +2196,11 @@ public Response resetStorageDriver(@PathParam("alias") String alias) throws Wrap
     }
     
     @GET
+    @AuthRequired
     @Path("/dataverse/storageDrivers")
-    public Response listStorageDrivers() throws WrappedResponse {
+    public Response listStorageDrivers(@Context ContainerRequestContext crc) throws WrappedResponse {
     	try {
-    		AuthenticatedUser user = findAuthenticatedUserOrDie();
+    		AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
     		if (!user.isSuperuser()) {
     			return error(Response.Status.FORBIDDEN, "Superusers only.");
     		}
@@ -2087,14 +2213,15 @@ public Response listStorageDrivers() throws WrappedResponse {
     }
     
     @GET
+    @AuthRequired
     @Path("/dataverse/{alias}/curationLabelSet")
-    public Response getCurationLabelSet(@PathParam("alias") String alias) throws WrappedResponse {
+    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
         Dataverse dataverse = dataverseSvc.findByAlias(alias);
         if (dataverse == null) {
             return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
         }
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -2108,14 +2235,15 @@ public Response getCurationLabelSet(@PathParam("alias") String alias) throws Wra
     }
 
     @PUT
+    @AuthRequired
     @Path("/dataverse/{alias}/curationLabelSet")
-    public Response setCurationLabelSet(@PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse {
+    public Response setCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias, @QueryParam("name") String name) throws WrappedResponse {
         Dataverse dataverse = dataverseSvc.findByAlias(alias);
         if (dataverse == null) {
             return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
         }
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -2138,14 +2266,15 @@ public Response setCurationLabelSet(@PathParam("alias") String alias, @QueryPara
     }
 
     @DELETE
+    @AuthRequired
     @Path("/dataverse/{alias}/curationLabelSet")
-    public Response resetCurationLabelSet(@PathParam("alias") String alias) throws WrappedResponse {
+    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("alias") String alias) throws WrappedResponse {
         Dataverse dataverse = dataverseSvc.findByAlias(alias);
         if (dataverse == null) {
             return error(Response.Status.NOT_FOUND, "Could not find dataverse based on alias supplied: " + alias + ".");
         }
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -2157,10 +2286,11 @@ public Response resetCurationLabelSet(@PathParam("alias") String alias) throws W
     }
 
     @GET
+    @AuthRequired
     @Path("/dataverse/curationLabelSets")
-    public Response listCurationLabelSets() throws WrappedResponse {
+    public Response listCurationLabelSets(@Context ContainerRequestContext crc) throws WrappedResponse {
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -2251,12 +2381,13 @@ public Response getBannerMessages(@PathParam("id") Long id) throws WrappedRespon
     }
     
     @POST
+    @AuthRequired
     @Consumes("application/json")
     @Path("/requestSignedUrl")
-    public Response getSignedUrl(JsonObject urlInfo) {
+    public Response getSignedUrl(@Context ContainerRequestContext crc, JsonObject urlInfo) {
         AuthenticatedUser superuser = null;
         try {
-            superuser = findAuthenticatedUserOrDie();
+            superuser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -2289,12 +2420,60 @@ public Response getSignedUrl(JsonObject urlInfo) {
         }
         
         String baseUrl = urlInfo.getString("url");
-        int timeout = urlInfo.getInt(ExternalToolHandler.TIMEOUT, 10);
-        String method = urlInfo.getString(ExternalToolHandler.HTTP_METHOD, "GET");
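+        // The timeout and HTTP method default to 10 and GET when not supplied in the request body.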
+        int timeout = urlInfo.getInt(URLTokenUtil.TIMEOUT, 10);
+        String method = urlInfo.getString(URLTokenUtil.HTTP_METHOD, "GET");
         
         String signedUrl = UrlSignerUtil.signUrl(baseUrl, timeout, userId, method, key); 
         
-        return ok(Json.createObjectBuilder().add(ExternalToolHandler.SIGNED_URL, signedUrl));
+        return ok(Json.createObjectBuilder().add(URLTokenUtil.SIGNED_URL, signedUrl));
     }
  
+    @DELETE
+    @Path("/clearThumbnailFailureFlag")
+    public Response clearThumbnailFailureFlag() {
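+        // Clear the previewimagefail flag on every DvObject so thumbnail generation will be retried.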
+        em.createNativeQuery("UPDATE dvobject SET previewimagefail = FALSE").executeUpdate();
+        return ok("Thumbnail Failure Flags cleared.");
+    }
+    
+    @DELETE
+    @Path("/clearThumbnailFailureFlag/{id}")
+    public Response clearThumbnailFailureFlagByDatafile(@PathParam("id") String fileId) {
+        try {
+            DataFile df = findDataFileOrDie(fileId);
+            Query clearFlagQuery = em.createNativeQuery("UPDATE dvobject SET previewimagefail = FALSE where id = ?");
+            clearFlagQuery.setParameter(1, df.getId());
+            clearFlagQuery.executeUpdate();
+            return ok("Thumbnail Failure Flag cleared for file id: " + df.getId() + ".");
+        } catch (WrappedResponse r) {
+            logger.info("Could not find file with the id: " + fileId);
+            return error(Status.BAD_REQUEST, "Could not find file with the id: " + fileId);
+        }
+    }
+
+    /**
+     * For testing only. Download a file from /tmp.
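+     * Example (hypothetical file name; assumes the admin API base path /api/admin and a superuser API token):
+     * curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/admin/downloadTmpFile?fullyQualifiedPathToFile=/tmp/example.txt"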
+     */
+    @GET
+    @AuthRequired
+    @Path("/downloadTmpFile")
+    public Response downloadTmpFile(@Context ContainerRequestContext crc, @QueryParam("fullyQualifiedPathToFile") String fullyQualifiedPathToFile) {
+        try {
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
+            if (!user.isSuperuser()) {
+                return error(Response.Status.FORBIDDEN, "Superusers only.");
+            }
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
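+        // normalize() collapses "." and ".." segments, so a path like /tmp/../etc/passwd cannot slip past the "/tmp" prefix check below.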
+        java.nio.file.Path normalizedPath = Paths.get(fullyQualifiedPathToFile).normalize();
+        if (!normalizedPath.toString().startsWith("/tmp")) {
+            return error(Status.BAD_REQUEST, "Path must begin with '/tmp' but after normalization was '" + normalizedPath +"'.");
+        }
+        try {
+            return ok(new FileInputStream(fullyQualifiedPathToFile));
+        } catch (IOException ex) {
+            return error(Status.BAD_REQUEST, ex.toString());
+        }
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
index 6bf852d25f7..0e5b8226310 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java
@@ -10,21 +10,22 @@
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.ejb.EJB;
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.FilterConfig;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 
 
 /**
  * A web filter to block API administration calls.
  * @author michael
  */
-public class ApiBlockingFilter implements javax.servlet.Filter {
+public class ApiBlockingFilter implements Filter {
     public static final String UNBLOCK_KEY_QUERYPARAM = "unblock-key";
             
     interface BlockPolicy {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
index eead559f15e..d076ab8f973 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConfiguration.java
@@ -1,6 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import javax.ws.rs.ApplicationPath;
+import jakarta.ws.rs.ApplicationPath;
+
+import edu.harvard.iq.dataverse.api.auth.AuthFilter;
 import org.glassfish.jersey.media.multipart.MultiPartFeature;
 import org.glassfish.jersey.server.ResourceConfig;
 
@@ -11,9 +13,6 @@ public ApiConfiguration() {
        packages("edu.harvard.iq.dataverse.api");
        packages("edu.harvard.iq.dataverse.mydata");
        register(MultiPartFeature.class);
+       register(AuthFilter.class);
    }
 }
-/*
-public class ApiConfiguration extends ResourceConfi {
-}
-*/
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java
new file mode 100644
index 00000000000..347a8946a46
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java
@@ -0,0 +1,20 @@
+package edu.harvard.iq.dataverse.api;
+
+public final class ApiConstants {
+
+    private ApiConstants() {
+        // Restricting instantiation
+    }
+
+    // Statuses
+    public static final String STATUS_OK = "OK";
+    public static final String STATUS_ERROR = "ERROR";
+
+    // Authentication
+    public static final String CONTAINER_REQUEST_CONTEXT_USER = "user";
+
+    // Dataset
+    public static final String DS_VERSION_LATEST = ":latest";
+    public static final String DS_VERSION_DRAFT = ":draft";
+    public static final String DS_VERSION_LATEST_PUBLISHED = ":latest-published";
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java
index 691afeaef20..193e1059415 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiRouter.java
@@ -1,21 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
+import jakarta.servlet.Filter;
 import java.io.IOException;
 import java.util.logging.Logger;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.RequestDispatcher;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.FilterConfig;
+import jakarta.servlet.RequestDispatcher;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * Routes API calls that don't have a version number to the latest API version
  * 
  * @author michael
  */
-public class ApiRouter implements javax.servlet.Filter {
+public class ApiRouter implements Filter {
     private static final Logger logger = Logger.getLogger(ApiRouter.class.getName());
     
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java
index 7b44d920fbe..a2d06bff93e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchImport.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse.api;
 
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.api.imports.ImportServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
@@ -9,18 +10,21 @@
 
 import edu.harvard.iq.dataverse.api.imports.ImportException;
 import edu.harvard.iq.dataverse.api.imports.ImportUtil.ImportType;
+import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.io.IOException;
 import java.io.PrintWriter;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Stateless
 @Path("batch")
@@ -42,10 +46,14 @@ public class BatchImport extends AbstractApiBean {
     BatchServiceBean batchService;
 
     @GET
+    @AuthRequired
     @Path("harvest")
-    public Response harvest(@QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) throws IOException {
-        return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.HARVEST, createDV);
-
+    public Response harvest(@Context ContainerRequestContext crc, @QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) throws IOException {
+        try {
+            return startBatchJob(getRequestAuthenticatedUserOrDie(crc), fileDir, parentIdtf, apiKey, ImportType.HARVEST, createDV);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
     }
 
     /**
@@ -57,12 +65,13 @@ public Response harvest(@QueryParam("path") String fileDir, @QueryParam("dv") St
      * @return import status (including id of the dataset created)
      */
     @POST
+    @AuthRequired
     @Path("import")
-    public Response postImport(String body, @QueryParam("dv") String parentIdtf, @QueryParam("key") String apiKey) {
+    public Response postImport(@Context ContainerRequestContext crc, String body, @QueryParam("dv") String parentIdtf, @QueryParam("key") String apiKey) {
 
         DataverseRequest dataverseRequest;
         try {
-            dataverseRequest = createDataverseRequest(findAuthenticatedUserOrDie());
+            dataverseRequest = createDataverseRequest(getRequestAuthenticatedUserOrDie(crc));
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -94,24 +103,23 @@ public Response postImport(String body, @QueryParam("dv") String parentIdtf, @Qu
      * @return import status (including id's of the datasets created)
      */
     @GET
+    @AuthRequired
     @Path("import")
-    public Response getImport(@QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) {
-
-        return startBatchJob(fileDir, parentIdtf, apiKey, ImportType.NEW, createDV);
-
+    public Response getImport(@Context ContainerRequestContext crc, @QueryParam("path") String fileDir, @QueryParam("dv") String parentIdtf, @QueryParam("createDV") Boolean createDV, @QueryParam("key") String apiKey) {
+        try {
+            return startBatchJob(getRequestAuthenticatedUserOrDie(crc), fileDir, parentIdtf, apiKey, ImportType.NEW, createDV);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
     }
 
-    private Response startBatchJob(String fileDir, String parentIdtf, String apiKey, ImportType importType, Boolean createDV) {
+    private Response startBatchJob(User user, String fileDir, String parentIdtf, String apiKey, ImportType importType, Boolean createDV) {
         if (createDV == null) {
             createDV = Boolean.FALSE;
         }
         try {
             DataverseRequest dataverseRequest;
-            try {
-                dataverseRequest = createDataverseRequest(findAuthenticatedUserOrDie());
-            } catch (WrappedResponse wr) {
-                return wr.getResponse();
-            }
+            dataverseRequest = createDataverseRequest(user);
             if (parentIdtf == null) {
                 parentIdtf = "root";
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java
index 8fe58298481..daddc447117 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BatchServiceBean.java
@@ -14,12 +14,12 @@
 import java.util.Date;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  * EJB for kicking off big batch jobs asynchronously from the REST API  (BatchImport.java)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
index 0d0176eb636..50862bc0d35 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
@@ -3,6 +3,7 @@
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.UserNotification;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
+import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
 import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
@@ -12,25 +13,22 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.sql.Timestamp;
-import java.util.Calendar;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 import java.util.Date;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 
 /**
  * REST API bean for managing {@link BuiltinUser}s.
@@ -212,17 +210,14 @@ private Response internalSave(BuiltinUser user, String password, String key, Boo
         }
     }
 
+    /***
+     * This method was moved here from AbstractApiBean during the filter-based auth
+     * refactoring, in order to preserve the existing BuiltinUsers endpoints behavior.
+     *
+     * @param apiKey from request
+     * @return error Response
+     */
+    private Response badApiKey(String apiKey) {
+        return error(Status.UNAUTHORIZED, (apiKey != null) ? "Bad api key " : "Please provide a key query parameter (?key=XXX) or via the HTTP header " + ApiKeyAuthMechanism.DATAVERSE_API_KEY_REQUEST_HEADER_NAME);
+    }
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java
index 7edb0ac838c..35f19375902 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java
@@ -12,14 +12,14 @@
 import java.io.OutputStream;
 import java.io.IOException;
 
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.WebApplicationException;
-
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.WebApplicationException;
+
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.ext.MessageBodyWriter;
+import jakarta.ws.rs.ext.Provider;
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.dataaccess.*;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java b/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java
index 063033d4747..d7c8bd827d1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DataTagsAPI.java
@@ -5,18 +5,18 @@
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.JsonObject;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.client.Client;
+import jakarta.ws.rs.client.ClientBuilder;
+import jakarta.ws.rs.client.WebTarget;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
index 4ec728a8159..00b7dfa6e36 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApi.java
@@ -20,18 +20,18 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
@@ -41,9 +41,9 @@
 
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.persistence.NoResultException;
-import javax.persistence.TypedQuery;
-import javax.ws.rs.core.Response.Status;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.TypedQuery;
+import jakarta.ws.rs.core.Response.Status;
 
 import java.io.BufferedInputStream;
 import java.io.FileOutputStream;
@@ -207,7 +207,7 @@ public Response showControlledVocabularyForSubject() {
     @GET
     @Path("loadNAControlledVocabularyValue")
     public Response loadNAControlledVocabularyValue() {
-        // the find will throw a javax.persistence.NoResultException if no values are in db
+        // the find will throw a NoResultException if no values are in db
 //            datasetFieldService.findNAControlledVocabularyValue();
         TypedQuery<ControlledVocabularyValue> naValueFinder = em.createQuery("SELECT OBJECT(o) FROM ControlledVocabularyValue AS o WHERE o.datasetFieldType is null AND o.strValue = :strvalue", ControlledVocabularyValue.class);
         naValueFinder.setParameter("strvalue", DatasetField.NA_VALUE);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 0bb6eebb80b..b3bfc476423 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -3,12 +3,14 @@
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.DatasetLock.Reason;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
 import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
@@ -27,6 +29,7 @@
 import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand;
@@ -61,10 +64,9 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
-import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse;
 import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.dataaccess.S3AccessIO;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
@@ -81,6 +83,7 @@
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
 import edu.harvard.iq.dataverse.metrics.MetricsUtil;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.ArchiverUtil;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -88,12 +91,16 @@
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.MarkupChecker;
 import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.bagit.OREMap;
 import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
 import edu.harvard.iq.dataverse.util.json.JsonLDTerm;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import edu.harvard.iq.dataverse.util.SignpostingResources;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.*;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
@@ -103,10 +110,10 @@
 import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
 
 import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
+import edu.harvard.iq.dataverse.globus.GlobusUtil;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.StringReader;
 import java.net.URI;
 import java.sql.Timestamp;
 import java.text.MessageFormat;
@@ -124,35 +131,36 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.json.*;
-import javax.json.stream.JsonParsingException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.NotAcceptableException;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.*;
-import javax.ws.rs.core.Response.Status;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.json.*;
+import jakarta.json.stream.JsonParsingException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.DefaultValue;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.NotAcceptableException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.*;
+import jakarta.ws.rs.core.Response.Status;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.solr.client.solrj.SolrServerException;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataParam;
 import com.amazonaws.services.s3.model.PartETag;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 
 @Path("datasets")
 public class Datasets extends AbstractApiBean {
@@ -229,6 +237,12 @@ public class Datasets extends AbstractApiBean {
     @EJB
     DatasetVersionServiceBean datasetversionService;
 
+    @Inject
+    PrivateUrlServiceBean privateUrlService;
+
+    @Inject
+    DatasetVersionFilesServiceBean datasetVersionFilesServiceBean;
+
     /**
      * Used to consolidate the way we parse and handle dataset versions.
      * @param <T> 
@@ -241,8 +255,9 @@ public interface DsVersionHandler<T> {
     }
     
     @GET
+    @AuthRequired
     @Path("{id}")
-    public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
+    public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
         return response( req -> {
             final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id)));
             final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
@@ -252,8 +267,8 @@ public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo,
                 MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
                 mdcLogService.logEntry(entry);
             }
-            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null));
-        });
+            return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null));
+        }, getRequestUser(crc));
     }
     
     // TODO: 
@@ -264,7 +279,7 @@ public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo,
     
     @GET
     @Path("/export")
-    @Produces({"application/xml", "application/json", "application/html" })
+    @Produces({"application/xml", "application/json", "application/html", "application/ld+json" })
     public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
 
         try {
@@ -293,8 +308,9 @@ public Response exportDataset(@QueryParam("persistentId") String persistentId, @
     }
 
     @DELETE
+    @AuthRequired
     @Path("{id}")
-    public Response deleteDataset( @PathParam("id") String id) {
+    public Response deleteDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
         // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
         // (and there's a comment that says "TODO: remove this command")
         // do we need an exposed API call for it? 
@@ -304,11 +320,11 @@ public Response deleteDataset( @PathParam("id") String id) {
         // "destroyDataset" API calls.  
         // (The logic below follows the current implementation of the underlying 
         // commands!)
-        
+
+        User u = getRequestUser(crc);
         return response( req -> {
             Dataset doomed = findDatasetOrDie(id);
             DatasetVersion doomedVersion = doomed.getLatestVersion();
-            User u = findUserOrDie();
             boolean destroy = false;
             
             if (doomed.getVersions().size() == 1) {
@@ -338,17 +354,18 @@ public Response deleteDataset( @PathParam("id") String id) {
             }
             
             return ok("Dataset " + id + " deleted");
-        });
+        }, u);
     }
         
     @DELETE
+    @AuthRequired
     @Path("{id}/destroy")
-    public Response destroyDataset(@PathParam("id") String id) {
+    public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam("id") String id) {
 
+        User u = getRequestUser(crc);
         return response(req -> {
             // first check if dataset is released, and if so, if user is a superuser
             Dataset doomed = findDatasetOrDie(id);
-            User u = findUserOrDie();
 
             if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                 throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
@@ -370,14 +387,15 @@ public Response destroyDataset(@PathParam("id") String id) {
             }
 
             return ok("Dataset " + id + " destroyed");
-        });
+        }, u);
     }
     
     @DELETE
+    @AuthRequired
     @Path("{id}/versions/{versionId}")
-    public Response deleteDraftVersion( @PathParam("id") String id,  @PathParam("versionId") String versionId ){
-        if ( ! ":draft".equals(versionId) ) {
-            return badRequest("Only the :draft version can be deleted");
+    public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
+        if (!DS_VERSION_DRAFT.equals(versionId)) {
+            return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted");
         }
 
         return response( req -> {
@@ -407,22 +425,24 @@ public Response deleteDraftVersion( @PathParam("id") String id,  @PathParam("ver
             }
             
             return ok("Draft version of dataset " + id + " deleted");
-        });
+        }, getRequestUser(crc));
     }
         
     @DELETE
+    @AuthRequired
     @Path("{datasetId}/deleteLink/{linkedDataverseId}")
-    public Response deleteDatasetLinkingDataverse( @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
+    public Response deleteDatasetLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) {
                 boolean index = true;
         return response(req -> {
             execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index));
             return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted");
-        });
+        }, getRequestUser(crc));
     }
         
     @PUT
+    @AuthRequired
     @Path("{id}/citationdate")
-    public Response setCitationDate( @PathParam("id") String id, String dsfTypeName) {
+    public Response setCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id, String dsfTypeName) {
         return response( req -> {
             if ( dsfTypeName.trim().isEmpty() ){
                 return badRequest("Please provide a dataset field type in the requst body.");
@@ -437,56 +457,149 @@ public Response setCitationDate( @PathParam("id") String id, String dsfTypeName)
 
             execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType));
             return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? dsfType.getDisplayName() : "default"));
-        });
+        }, getRequestUser(crc));
     }
     
     @DELETE
+    @AuthRequired
     @Path("{id}/citationdate")
-    public Response useDefaultCitationDate( @PathParam("id") String id) {
+    public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @PathParam("id") String id) {
         return response( req -> {
             execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
             return ok("Citation Date for dataset " + id + " set to default");
-        });
+        }, getRequestUser(crc));
     }
     
     @GET
+    @AuthRequired
     @Path("{id}/versions")
-    public Response listVersions( @PathParam("id") String id ) {
-        return response( req ->
-             ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) )
+    public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("includeFiles") Boolean includeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) {
+
+        return response( req -> {
+            Dataset dataset = findDatasetOrDie(id);
+
+            return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, (includeFiles == null ? true : includeFiles)) )
                                 .stream()
-                                .map( d -> json(d) )
-                                .collect(toJsonArray())));
+                                .map( d -> json(d, includeFiles == null ? true : includeFiles) )
+                                .collect(toJsonArray()));
+        }, getRequestUser(crc));
     }
     
     @GET
+    @AuthRequired
     @Path("{id}/versions/{versionId}")
-    public Response getVersion( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+    public Response getVersion(@Context ContainerRequestContext crc,
+                               @PathParam("id") String datasetId,
+                               @PathParam("versionId") String versionId,
+                               @QueryParam("includeFiles") Boolean includeFiles,
+                               @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
+                               @Context UriInfo uriInfo,
+                               @Context HttpHeaders headers) {
         return response( req -> {
-            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
-            return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
-                    : ok(json(dsv));
-        });
+            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
+
+            if (dsv == null || dsv.getId() == null) {
+                return notFound("Dataset version not found");
+            }
+
+            if (includeFiles == null ? true : includeFiles) {
+                dsv = datasetversionService.findDeep(dsv.getId());
+            }
+            return ok(json(dsv, includeFiles == null ? true : includeFiles));
+        }, getRequestUser(crc));
     }
-    
+
     @GET
+    @AuthRequired
     @Path("{id}/versions/{versionId}/files")
-    public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
-        return response( req -> ok( jsonFileMetadatas(
-                         getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas())));
+    public Response getVersionFiles(@Context ContainerRequestContext crc,
+                                    @PathParam("id") String datasetId,
+                                    @PathParam("versionId") String versionId,
+                                    @QueryParam("limit") Integer limit,
+                                    @QueryParam("offset") Integer offset,
+                                    @QueryParam("contentType") String contentType,
+                                    @QueryParam("accessStatus") String accessStatus,
+                                    @QueryParam("categoryName") String categoryName,
+                                    @QueryParam("tabularTagName") String tabularTagName,
+                                    @QueryParam("searchText") String searchText,
+                                    @QueryParam("orderCriteria") String orderCriteria,
+                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
+                                    @Context UriInfo uriInfo,
+                                    @Context HttpHeaders headers) {
+        return response(req -> {
+            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
+            DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria;
+            try {
+                fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ;
+            } catch (IllegalArgumentException e) {
+                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria)));
+            }
+            FileSearchCriteria fileSearchCriteria;
+            try {
+                fileSearchCriteria = new FileSearchCriteria(
+                        contentType,
+                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
+                        categoryName,
+                        tabularTagName,
+                        searchText
+                );
+            } catch (IllegalArgumentException e) {
+                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
+            }
+            return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria)));
+        }, getRequestUser(crc));
     }
-    
+
+    @GET
+    @AuthRequired
+    @Path("{id}/versions/{versionId}/files/counts")
+    public Response getVersionFileCounts(@Context ContainerRequestContext crc,
+                                         @PathParam("id") String datasetId,
+                                         @PathParam("versionId") String versionId,
+                                         @QueryParam("contentType") String contentType,
+                                         @QueryParam("accessStatus") String accessStatus,
+                                         @QueryParam("categoryName") String categoryName,
+                                         @QueryParam("tabularTagName") String tabularTagName,
+                                         @QueryParam("searchText") String searchText,
+                                         @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
+                                         @Context UriInfo uriInfo,
+                                         @Context HttpHeaders headers) {
+        return response(req -> {
+            FileSearchCriteria fileSearchCriteria;
+            try {
+                fileSearchCriteria = new FileSearchCriteria(
+                        contentType,
+                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
+                        categoryName,
+                        tabularTagName,
+                        searchText
+                );
+            } catch (IllegalArgumentException e) {
+                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
+            }
+            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned);
+            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+            jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria));
+            jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria)));
+            jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria)));
+            jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria)));
+            jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria)));
+            return ok(jsonObjectBuilder);
+        }, getRequestUser(crc));
+    }
+
     @GET
+    @AuthRequired
     @Path("{id}/dirindex")
     @Produces("text/html")
-    public Response getFileAccessFolderView(@PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
+    public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
 
         folderName = folderName == null ? "" : folderName;
-        versionId = versionId == null ? ":latest-published" : versionId;
+        versionId = versionId == null ? DS_VERSION_LATEST_PUBLISHED : versionId;
         
         DatasetVersion version;
         try {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -513,21 +626,24 @@ public Response getFileAccessFolderView(@PathParam("id") String datasetId, @Quer
     }
     
     @GET
+    @AuthRequired
     @Path("{id}/versions/{versionId}/metadata")
-    public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+    public Response getVersionMetadata(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
         return response( req -> ok(
                     jsonByBlocks(
                         getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
-                                .getDatasetFields())));
+                                .getDatasetFields())), getRequestUser(crc));
     }
     
     @GET
+    @AuthRequired
     @Path("{id}/versions/{versionNumber}/metadata/{block}")
-    public Response getVersionMetadataBlock( @PathParam("id") String datasetId, 
-                                             @PathParam("versionNumber") String versionNumber, 
-                                             @PathParam("block") String blockName, 
-                                             @Context UriInfo uriInfo, 
-                                             @Context HttpHeaders headers ) {
+    public Response getVersionMetadataBlock(@Context ContainerRequestContext crc,
+                                            @PathParam("id") String datasetId,
+                                            @PathParam("versionNumber") String versionNumber,
+                                            @PathParam("block") String blockName,
+                                            @Context UriInfo uriInfo,
+                                            @Context HttpHeaders headers) {
         
         return response( req -> {
             DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );
@@ -539,21 +655,56 @@ public Response getVersionMetadataBlock( @PathParam("id") String datasetId,
                 }
             }
             return notFound("metadata block named " + blockName + " not found");
-        });
+        }, getRequestUser(crc));
     }
-    
+
+    /**
+     * Get the Signposting linkset for a dataset version.
+     * @param datasetId the dataset id or persistent identifier
+     * @param versionId the dataset version (the draft version is not supported)
+     * @param uriInfo request URI information (injected)
+     * @param headers request HTTP headers (injected)
+     * @return the linkset as a JSON object
+     */
+    @GET
+    @AuthRequired
+    @Path("{id}/versions/{versionId}/linkset")
+    public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, 
+           @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+        if (DS_VERSION_DRAFT.equals(versionId)) {
+            return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version");
+        }
+        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
+        try {
+            DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
+            return Response
+                    .ok(Json.createObjectBuilder()
+                            .add("linkset",
+                                    new SignpostingResources(systemConfig, dsv,
+                                            JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""),
+                                            JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse(""))
+                                                    .getJsonLinkset())
+                            .build())
+                    .type(MediaType.APPLICATION_JSON).build();
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+    }
+
     @GET
+    @AuthRequired
     @Path("{id}/modifyRegistration")
-    public Response updateDatasetTargetURL(@PathParam("id") String id ) {
+    public Response updateDatasetTargetURL(@Context ContainerRequestContext crc, @PathParam("id") String id ) {
         return response( req -> {
             execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
             return ok("Dataset " + id + " target url updated");
-        });
+        }, getRequestUser(crc));
     }
     
     @POST
+    @AuthRequired
     @Path("/modifyRegistrationAll")
-    public Response updateDatasetTargetURLAll() {
+    public Response updateDatasetTargetURLAll(@Context ContainerRequestContext crc) {
         return response( req -> {
             datasetService.findAll().forEach( ds -> {
                 try {
@@ -563,12 +714,13 @@ public Response updateDatasetTargetURLAll() {
                 }
             });
             return ok("Update All Dataset target url completed");
-        });
+        }, getRequestUser(crc));
     }
     
     @POST
+    @AuthRequired
     @Path("{id}/modifyRegistrationMetadata")
-    public Response updateDatasetPIDMetadata(@PathParam("id") String id) {
+    public Response updateDatasetPIDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id) {
 
         try {
             Dataset dataset = findDatasetOrDie(id);
@@ -583,12 +735,13 @@ public Response updateDatasetPIDMetadata(@PathParam("id") String id) {
             execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
             List<String> args = Arrays.asList(id);
             return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
-        });
+        }, getRequestUser(crc));
     }
     
     @GET
+    @AuthRequired
     @Path("/modifyRegistrationPIDMetadataAll")
-    public Response updateDatasetPIDMetadataAll() {
+    public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc) {
         return response( req -> {
             datasetService.findAll().forEach( ds -> {
                 try {
@@ -598,22 +751,22 @@ public Response updateDatasetPIDMetadataAll() {
                 }
             });
             return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
-        });
+        }, getRequestUser(crc));
     }
   
     @PUT
+    @AuthRequired
     @Path("{id}/versions/{versionId}")
     @Consumes(MediaType.APPLICATION_JSON)
-    public Response updateDraftVersion( String jsonBody, @PathParam("id") String id,  @PathParam("versionId") String versionId ){
-      
-        if ( ! ":draft".equals(versionId) ) {
-            return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated");
+    public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) {
+        if (!DS_VERSION_DRAFT.equals(versionId)) {
+            return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated");
         }
         
-        try ( StringReader rdr = new StringReader(jsonBody) ) {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+        try {
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             Dataset ds = findDatasetOrDie(id);
-            JsonObject json = Json.createReader(rdr).readObject();
+            JsonObject json = JsonUtil.getJsonObject(jsonBody);
             DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);
             
             // clear possibly stale fields from the incoming dataset version.
@@ -651,10 +804,7 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id,
                 }
                 managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
             }
-//            DatasetVersion managedVersion = execCommand( updateDraft
-//                                                             ? new UpdateDatasetVersionCommand(req, incomingVersion)
-//                                                             : new CreateDatasetVersionCommand(req, ds, incomingVersion));
-            return ok( json(managedVersion) );
+            return ok( json(managedVersion, true) );
                     
         } catch (JsonParseException ex) {
             logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
@@ -667,11 +817,12 @@ public Response updateDraftVersion( String jsonBody, @PathParam("id") String id,
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/versions/{versionId}/metadata")
     @Produces("application/ld+json, application/json-ld")
-    public Response getVersionJsonLDMetadata(@PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
         try {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
             OREMap ore = new OREMap(dsv,
                     settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
@@ -688,22 +839,27 @@ public Response getVersionJsonLDMetadata(@PathParam("id") String id, @PathParam(
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/metadata")
     @Produces("application/ld+json, application/json-ld")
-    public Response getVersionJsonLDMetadata(@PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
-        return getVersionJsonLDMetadata(id, ":draft", uriInfo, headers);
+    public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+        return getVersionJsonLDMetadata(crc, id, DS_VERSION_DRAFT, uriInfo, headers);
     }
 
     @PUT
+    @AuthRequired
     @Path("{id}/metadata")
     @Consumes("application/ld+json, application/json-ld")
-    public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
+    public Response updateVersionMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {
 
         try {
             Dataset ds = findDatasetOrDie(id);
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
-            DatasetVersion dsv = ds.getOrCreateEditVersion();
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
+            //Get draft state as of now
+
             boolean updateDraft = ds.getLatestVersion().isDraft();
+            //Get the current draft or create a new version to update
+            DatasetVersion dsv = ds.getOrCreateEditVersion();
             dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false, licenseSvc);
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             boolean hasValidTerms = TermsOfUseAndAccessValidator.isTOUAValid(dsv.getTermsOfUseAndAccess(), null);
@@ -711,12 +867,8 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String
                 return error(Status.CONFLICT, BundleUtil.getStringFromBundle("dataset.message.toua.invalid"));
             }
             DatasetVersion managedVersion;
-            if (updateDraft) {
-                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
-                managedVersion = managedDataset.getOrCreateEditVersion();
-            } else {
-                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
-            }
+            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
+            managedVersion = managedDataset.getLatestVersion();
             String info = updateDraft ? "Version Updated" : "Version Created";
             return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
 
@@ -729,23 +881,23 @@ public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String
     }
 
     @PUT
+    @AuthRequired
     @Path("{id}/metadata/delete")
     @Consumes("application/ld+json, application/json-ld")
-    public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) {
+    public Response deleteMetadata(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("id") String id) {
         try {
             Dataset ds = findDatasetOrDie(id);
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
-            DatasetVersion dsv = ds.getOrCreateEditVersion();
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
+            //Get draft state as of now
+
             boolean updateDraft = ds.getLatestVersion().isDraft();
+            //Get the current draft or create a new version to update
+            DatasetVersion dsv = ds.getOrCreateEditVersion();
             dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, licenseSvc);
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             DatasetVersion managedVersion;
-            if (updateDraft) {
-                Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
-                managedVersion = managedDataset.getOrCreateEditVersion();
-            } else {
-                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
-            }
+            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
+            managedVersion = managedDataset.getLatestVersion();
             String info = updateDraft ? "Version Updated" : "Version Created";
             return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));
 
@@ -760,19 +912,21 @@ public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) {
     }
 
     @PUT
+    @AuthRequired
     @Path("{id}/deleteMetadata")
-    public Response deleteVersionMetadata(String jsonBody, @PathParam("id") String id) throws WrappedResponse {
+    public Response deleteVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id) throws WrappedResponse {
 
-        DataverseRequest req = createDataverseRequest(findUserOrDie());
+        DataverseRequest req = createDataverseRequest(getRequestUser(crc));
 
         return processDatasetFieldDataDelete(jsonBody, id, req);
     }
 
     private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
-        try (StringReader rdr = new StringReader(jsonBody)) {
+        try {
 
             Dataset ds = findDatasetOrDie(id);
-            JsonObject json = Json.createReader(rdr).readObject();
+            JsonObject json = JsonUtil.getJsonObject(jsonBody);
+            //Get the current draft or create a new version to update
             DatasetVersion dsv = ds.getOrCreateEditVersion();
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             List<DatasetField> fields = new LinkedList<>();
@@ -884,11 +1038,8 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav
             }
 
 
-            boolean updateDraft = ds.getLatestVersion().isDraft();
-            DatasetVersion managedVersion = updateDraft
-                    ? execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion()
-                    : execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
-            return ok(json(managedVersion));
+            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
+            return ok(json(managedVersion, true));
 
         } catch (JsonParseException ex) {
             logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
@@ -915,27 +1066,24 @@ private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
     }
     
     @PUT
+    @AuthRequired
     @Path("{id}/editMetadata")
-    public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
+    public Response editVersionMetadata(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
 
         Boolean replaceData = replace != null;
         DataverseRequest req = null;
-        try {
-         req = createDataverseRequest(findUserOrDie());
-        } catch (WrappedResponse ex) {
-            logger.log(Level.SEVERE, "Edit metdata error: " + ex.getMessage(), ex);
-            return ex.getResponse();
-        }
+        req = createDataverseRequest(getRequestUser(crc));
 
         return processDatasetUpdate(jsonBody, id, req, replaceData);
     }
     
     
     private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
-        try (StringReader rdr = new StringReader(jsonBody)) {
+        try {
            
             Dataset ds = findDatasetOrDie(id);
-            JsonObject json = Json.createReader(rdr).readObject();
+            JsonObject json = JsonUtil.getJsonObject(jsonBody);
+            //Get the current draft or create a new version to update
             DatasetVersion dsv = ds.getOrCreateEditVersion();
             dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv);
             List<DatasetField> fields = new LinkedList<>();
@@ -1038,16 +1186,9 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque
                     dsv.getDatasetFields().add(updateField);
                 }
             }
-            boolean updateDraft = ds.getLatestVersion().isDraft();
-            DatasetVersion managedVersion;
+            DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion();
 
-            if (updateDraft) {
-                managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getOrCreateEditVersion();
-            } else {
-                managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
-            }
-
-            return ok(json(managedVersion));
+            return ok(json(managedVersion, true));
 
         } catch (JsonParseException ex) {
             logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
@@ -1082,22 +1223,24 @@ private String validateDatasetFieldValues(List<DatasetField> fields) {
      * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431
      */
     @GET
+    @AuthRequired
     @Path("{id}/actions/:publish")
     @Deprecated
-    public Response publishDataseUsingGetDeprecated( @PathParam("id") String id, @QueryParam("type") String type ) {
+    public Response publishDataseUsingGetDeprecated(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type ) {
         logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated.");
-        return publishDataset(id, type, false);
+        return publishDataset(crc, id, type, false);
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/actions/:publish")
-    public Response publishDataset(@PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
+    public Response publishDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) {
         try {
             if (type == null) {
                 return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent').");
             }
             boolean updateCurrent=false;
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             type = type.toLowerCase();
             boolean isMinor=false;
             switch (type) {
@@ -1198,7 +1341,7 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S
                     return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
                 } else {
                     return Response.ok(Json.createObjectBuilder()
-                            .add("status", STATUS_OK)
+                            .add("status", ApiConstants.STATUS_OK)
                             .add("status_details", successMsg)
                             .add("data", json(ds)).build())
                             .type(MediaType.APPLICATION_JSON)
@@ -1216,11 +1359,12 @@ public Response publishDataset(@PathParam("id") String id, @QueryParam("type") S
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/actions/:releasemigrated")
     @Consumes("application/ld+json, application/json-ld")
-    public Response publishMigratedDataset(String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
+    public Response publishMigratedDataset(@Context ContainerRequestContext crc, String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) {
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets");
             }
@@ -1306,10 +1450,11 @@ public Response publishMigratedDataset(String jsonldBody, @PathParam("id") Strin
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/move/{targetDataverseAlias}")
-    public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
+    public Response moveDataset(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataset ds = findDatasetOrDie(id);
             Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
             if (target == null) {
@@ -1330,13 +1475,14 @@ public Response moveDataset(@PathParam("id") String id, @PathParam("targetDatave
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/files/actions/:set-embargo")
-    public Response createFileEmbargo(@PathParam("id") String id, String jsonBody){
+    public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
 
         // user is authenticated
         AuthenticatedUser authenticatedUser = null;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Status.UNAUTHORIZED, "Authentication is required.");
         }
@@ -1383,8 +1529,7 @@ public Response createFileEmbargo(@PathParam("id") String id, String jsonBody){
             return error(Status.BAD_REQUEST, "No Embargoes allowed");
         }
 
-        StringReader rdr = new StringReader(jsonBody);
-        JsonObject json = Json.createReader(rdr).readObject();
+        JsonObject json = JsonUtil.getJsonObject(jsonBody);
 
         Embargo embargo = new Embargo();
 
@@ -1440,7 +1585,7 @@ public Response createFileEmbargo(@PathParam("id") String id, String jsonBody){
             }
             if (badFiles) {
                 return Response.status(Status.FORBIDDEN)
-                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", STATUS_ERROR)
+                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                 .add("message", "You do not have permission to embargo the following files")
                                 .add("files", restrictedFiles).build())
                         .type(MediaType.APPLICATION_JSON_TYPE).build();
@@ -1486,13 +1631,14 @@ public Response createFileEmbargo(@PathParam("id") String id, String jsonBody){
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/files/actions/:unset-embargo")
-    public Response removeFileEmbargo(@PathParam("id") String id, String jsonBody){
+    public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathParam("id") String id, String jsonBody){
 
         // user is authenticated
         AuthenticatedUser authenticatedUser = null;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Status.UNAUTHORIZED, "Authentication is required.");
         }
@@ -1526,8 +1672,7 @@ public Response removeFileEmbargo(@PathParam("id") String id, String jsonBody){
             return error(Status.BAD_REQUEST, "No Embargoes allowed");
         }
 
-        StringReader rdr = new StringReader(jsonBody);
-        JsonObject json = Json.createReader(rdr).readObject();
+        JsonObject json = JsonUtil.getJsonObject(jsonBody);
 
         List<DataFile> datasetFiles = dataset.getFiles();
         List<DataFile> embargoFilesToUnset = new LinkedList<>();
@@ -1559,7 +1704,7 @@ public Response removeFileEmbargo(@PathParam("id") String id, String jsonBody){
             }
             if (badFiles) {
                 return Response.status(Status.FORBIDDEN)
-                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", STATUS_ERROR)
+                        .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", ApiConstants.STATUS_ERROR)
                                 .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
                                 .add("files", restrictedFiles).build())
                         .type(MediaType.APPLICATION_JSON_TYPE).build();
@@ -1597,10 +1742,11 @@ public Response removeFileEmbargo(@PathParam("id") String id, String jsonBody){
 
 
     @PUT
+    @AuthRequired
     @Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
-    public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
+    public Response linkDataset(@Context ContainerRequestContext crc, @PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataset linked = findDatasetOrDie(linkedDatasetId);
             Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
             if (linked == null){
@@ -1629,7 +1775,7 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio
                 return error(Status.NOT_FOUND, "This Dataset has no custom license");
             }
             persistentId = getRequestParameter(":persistentId".substring(1));
-            if (versionId.equals(":draft")) {
+            if (versionId.equals(DS_VERSION_DRAFT)) {
                 versionId = "DRAFT";
             }
         } catch (WrappedResponse wrappedResponse) {
@@ -1641,10 +1787,11 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio
 
 
     @GET
+    @AuthRequired
     @Path("{id}/links")
-    public Response getLinks(@PathParam("id") String idSupplied ) {
+    public Response getLinks(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied ) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if (!u.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Not a superuser");
             }
@@ -1671,8 +1818,9 @@ public Response getLinks(@PathParam("id") String idSupplied ) {
      * @param apiKey
      */
     @POST
+    @AuthRequired
     @Path("{identifier}/assignments")
-    public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
+    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
         try {
             Dataset dataset = findDatasetOrDie(id);
             
@@ -1700,7 +1848,7 @@ public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier")
 
             String privateUrlToken = null;
             return ok(
-                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(findUserOrDie()), privateUrlToken))));
+                    json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(getRequestUser(crc)), privateUrlToken))));
         } catch (WrappedResponse ex) {
             List<String> args = Arrays.asList(ex.getMessage());
             logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
@@ -1710,13 +1858,14 @@ public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier")
     }
     
     @DELETE
+    @AuthRequired
     @Path("{identifier}/assignments/{id}")
-    public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
+    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
         RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
         if (ra != null) {
             try {
                 findDatasetOrDie(dsId);
-                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(findUserOrDie())));
+                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
                 List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
                 return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
             } catch (WrappedResponse ex) {
@@ -1729,38 +1878,42 @@ public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam(
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/assignments")
-    public Response getAssignments(@PathParam("identifier") String id) {
+    public Response getAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
         return response(req ->
                 ok(execCommand(
                         new ListRoleAssignments(req, findDatasetOrDie(id)))
-                        .stream().map(ra -> json(ra)).collect(toJsonArray())));
+                        .stream().map(ra -> json(ra)).collect(toJsonArray())), getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/privateUrl")
-    public Response getPrivateUrlData(@PathParam("id") String idSupplied) {
+    public Response getPrivateUrlData(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         return response( req -> {
             PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
             return (privateUrl != null) ? ok(json(privateUrl))
                     : error(Response.Status.NOT_FOUND, "Private URL not found.");
-        });
+        }, getRequestUser(crc));
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/privateUrl")
-    public Response createPrivateUrl(@PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
+    public Response createPrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
         if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
             throw new NotAcceptableException("Anonymized Access not enabled");
         }
         return response(req ->
                 ok(json(execCommand(
-                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))));
+                new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))), getRequestUser(crc));
     }
 
     @DELETE
+    @AuthRequired
     @Path("{id}/privateUrl")
-    public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
+    public Response deletePrivateUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         return response( req -> {
             Dataset dataset = findDatasetOrDie(idSupplied);
             PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
@@ -1770,20 +1923,17 @@ public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
             } else {
                 return notFound("No Private URL to delete.");
             }
-        });
+        }, getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/thumbnail/candidates")
-    public Response getDatasetThumbnailCandidates(@PathParam("id") String idSupplied) {
+    public Response getDatasetThumbnailCandidates(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
             Dataset dataset = findDatasetOrDie(idSupplied);
             boolean canUpdateThumbnail = false;
-            try {
-                canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
-            } catch (WrappedResponse ex) {
-                logger.info("Exception thrown while trying to figure out permissions while getting thumbnail for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage());
-            }
+            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
             if (!canUpdateThumbnail) {
                 return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
             }
@@ -1824,12 +1974,29 @@ public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
         }
     }
 
+    @GET
+    @Produces({ "image/png" })
+    @Path("{id}/logo")
+    public Response getDatasetLogo(@PathParam("id") String idSupplied) {
+        try {
+            Dataset dataset = findDatasetOrDie(idSupplied);
+            InputStream is = DatasetUtil.getLogoAsInputStream(dataset);
+            if (is == null) {
+                return notFound("Logo not available");
+            }
+            return Response.ok(is).build();
+        } catch (WrappedResponse wr) {
+            return notFound("Logo not available");
+        }
+    }
+
     // TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
     @POST
+    @AuthRequired
     @Path("{id}/thumbnail/{dataFileId}")
-    public Response setDataFileAsThumbnail(@PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
+    public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) {
         try {
-            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
+            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null));
             return ok("Thumbnail set to " + datasetThumbnail.getBase64image());
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -1837,12 +2004,12 @@ public Response setDataFileAsThumbnail(@PathParam("id") String idSupplied, @Path
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/thumbnail")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
-    public Response uploadDatasetLogo(@PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream
-    ) {
+    public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
         try {
-            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
+            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
             return ok("Thumbnail is now " + datasetThumbnail.getBase64image());
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -1850,10 +2017,11 @@ public Response uploadDatasetLogo(@PathParam("id") String idSupplied, @FormDataP
     }
 
     @DELETE
+    @AuthRequired
     @Path("{id}/thumbnail")
-    public Response removeDatasetLogo(@PathParam("id") String idSupplied) {
+    public Response removeDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
-            DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
+            execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null));
             return ok("Dataset thumbnail removed.");
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -1861,8 +2029,9 @@ public Response removeDatasetLogo(@PathParam("id") String idSupplied) {
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/dataCaptureModule/rsync")
-    public Response getRsync(@PathParam("identifier") String id) {
+    public Response getRsync(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
         //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs?
         if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
             return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
@@ -1870,7 +2039,7 @@ public Response getRsync(@PathParam("identifier") String id) {
         Dataset dataset = null;
         try {
             dataset = findDatasetOrDie(id);
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
             
             DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
@@ -1901,12 +2070,13 @@ public Response getRsync(@PathParam("identifier") String id) {
      * -MAD 4.9.1
      */
     @POST
+    @AuthRequired
     @Path("{identifier}/dataCaptureModule/checksumValidation")
-    public Response receiveChecksumValidationResults(@PathParam("identifier") String id, JsonObject jsonFromDcm) {
+    public Response receiveChecksumValidationResults(@Context ContainerRequestContext crc, @PathParam("identifier") String id, JsonObject jsonFromDcm) {
         logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
         AuthenticatedUser authenticatedUser = null;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.BAD_REQUEST, "Authentication is required.");
         }
@@ -1929,7 +2099,7 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String
 
                     ImportMode importMode = ImportMode.MERGE;
                     try {
-                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(findUserOrDie()), dataset, uploadFolder, new Long(totalSize), importMode));
+                        JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(getRequestUser(crc)), dataset, uploadFolder, new Long(totalSize), importMode));
                         long jobId = jsonFromImportJobKickoff.getInt("executionId");
                         String message = jsonFromImportJobKickoff.getString("message");
                         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -2008,10 +2178,11 @@ public Response receiveChecksumValidationResults(@PathParam("identifier") String
     
 
     @POST
+    @AuthRequired
     @Path("{id}/submitForReview")
-    public Response submitForReview(@PathParam("id") String idSupplied) {
+    public Response submitForReview(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
-            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied)));
+            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied)));
             JsonObjectBuilder result = Json.createObjectBuilder();
             
             boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
@@ -2025,14 +2196,14 @@ public Response submitForReview(@PathParam("id") String idSupplied) {
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/returnToAuthor")
-    public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBody) {
-        
+    public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, String jsonBody) {
+
         if (jsonBody == null || jsonBody.isEmpty()) {
             return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
         }
-        StringReader rdr = new StringReader(jsonBody);
-        JsonObject json = Json.createReader(rdr).readObject();
+        JsonObject json = JsonUtil.getJsonObject(jsonBody);
         try {
             Dataset dataset = findDatasetOrDie(idSupplied);
             String reasonForReturn = null;
@@ -2041,7 +2212,7 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo
             if (reasonForReturn == null || reasonForReturn.isEmpty()) {
                 return error(Response.Status.BAD_REQUEST, "You must enter a reason for returning a dataset to the author(s).");
             }
-            AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
+            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
 
             JsonObjectBuilder result = Json.createObjectBuilder();
@@ -2054,13 +2225,15 @@ public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBo
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/curationStatus")
-    public Response getCurationStatus(@PathParam("id") String idSupplied) {
+    public Response getCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
             Dataset ds = findDatasetOrDie(idSupplied);
             DatasetVersion dsv = ds.getLatestVersion();
-            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), ds).has(Permission.PublishDataset)) {
-                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()));
+            User user = getRequestUser(crc);
+            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(user), ds).has(Permission.PublishDataset)) {
+                return response(req -> ok(dsv.getExternalStatusLabel()==null ? "":dsv.getExternalStatusLabel()), user);
             } else {
                 return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset.");
             }
@@ -2070,13 +2243,14 @@ public Response getCurationStatus(@PathParam("id") String idSupplied) {
     }
 
     @PUT
+    @AuthRequired
     @Path("{id}/curationStatus")
-    public Response setCurationStatus(@PathParam("id") String idSupplied, @QueryParam("label") String label) {
+    public Response setCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("label") String label) {
         Dataset ds = null;
         User u = null;
         try {
             ds = findDatasetOrDie(idSupplied);
-            u = findUserOrDie();
+            u = getRequestUser(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -2090,13 +2264,14 @@ public Response setCurationStatus(@PathParam("id") String idSupplied, @QueryPara
     }
 
     @DELETE
+    @AuthRequired
     @Path("{id}/curationStatus")
-    public Response deleteCurationStatus(@PathParam("id") String idSupplied) {
+    public Response deleteCurationStatus(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         Dataset ds = null;
         User u = null;
         try {
             ds = findDatasetOrDie(idSupplied);
-            u = findUserOrDie();
+            u = getRequestUser(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -2110,19 +2285,15 @@ public Response deleteCurationStatus(@PathParam("id") String idSupplied) {
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/uploadsid")
     @Deprecated
-    public Response getUploadUrl(@PathParam("id") String idSupplied) {
+    public Response getUploadUrl(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
             Dataset dataset = findDatasetOrDie(idSupplied);
 
             boolean canUpdateDataset = false;
-            try {
-                canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetVersionCommand.class);
-            } catch (WrappedResponse ex) {
-                logger.info("Exception thrown while trying to figure out permissions while getting upload URL for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage());
-                throw ex;
-            }
+            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset).canIssue(UpdateDatasetVersionCommand.class);
             if (!canUpdateDataset) {
                 return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
             }
@@ -2150,21 +2321,15 @@ public Response getUploadUrl(@PathParam("id") String idSupplied) {
     }
 
     @GET
+    @AuthRequired
     @Path("{id}/uploadurls")
-    public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
+    public Response getMPUploadUrls(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("size") long fileSize) {
         try {
             Dataset dataset = findDatasetOrDie(idSupplied);
 
             boolean canUpdateDataset = false;
-            try {
-                canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset)
-                        .canIssue(UpdateDatasetVersionCommand.class);
-            } catch (WrappedResponse ex) {
-                logger.info(
-                        "Exception thrown while trying to figure out permissions while getting upload URLs for dataset id "
-                                + dataset.getId() + ": " + ex.getLocalizedMessage());
-                throw ex;
-            }
+            canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(getRequestUser(crc)), dataset)
+                    .canIssue(UpdateDatasetVersionCommand.class);
             if (!canUpdateDataset) {
                 return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset.");
             }
@@ -2193,15 +2358,16 @@ public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam(
     }
 
     @DELETE
+    @AuthRequired
     @Path("mpupload")
-    public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
+    public Response abortMPUpload(@Context ContainerRequestContext crc, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
         try {
             Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
             //Allow the API to be used within a session (e.g. for direct upload in the UI)
             User user = session.getUser();
             if (!user.isAuthenticated()) {
                 try {
-                    user = findAuthenticatedUserOrDie();
+                    user = getRequestAuthenticatedUserOrDie(crc);
                 } catch (WrappedResponse ex) {
                     logger.info(
                             "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id "
@@ -2247,15 +2413,16 @@ public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryP
     }
 
     @PUT
+    @AuthRequired
     @Path("mpupload")
-    public Response completeMPUpload(String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
+    public Response completeMPUpload(@Context ContainerRequestContext crc, String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) {
         try {
             Dataset dataset = datasetSvc.findByGlobalId(idSupplied);
             //Allow the API to be used within a session (e.g. for direct upload in the UI)
             User user = session.getUser();
             if (!user.isAuthenticated()) {
                 try {
-                    user = findAuthenticatedUserOrDie();
+                    user = getRequestAuthenticatedUserOrDie(crc);
                 } catch (WrappedResponse ex) {
                     logger.info(
                             "Exception thrown while trying to figure out permissions to complete mpupload for dataset id "
@@ -2288,9 +2455,7 @@ public Response completeMPUpload(String partETagBody, @QueryParam("globalid") St
             List<PartETag> eTagList = new ArrayList<PartETag>();
             logger.info("Etags: " + partETagBody);
             try {
-                JsonReader jsonReader = Json.createReader(new StringReader(partETagBody));
-                JsonObject object = jsonReader.readObject();
-                jsonReader.close();
+                JsonObject object = JsonUtil.getJsonObject(partETagBody);
                 for (String partNo : object.keySet()) {
                     eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo)));
                 }
@@ -2332,9 +2497,11 @@ public Response completeMPUpload(String partETagBody, @QueryParam("globalid") St
      * @return
      */
     @POST
+    @AuthRequired
     @Path("{id}/add")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
-    public Response addFileToDataset(@PathParam("id") String idSupplied,
+    public Response addFileToDataset(@Context ContainerRequestContext crc,
+                    @PathParam("id") String idSupplied,
                     @FormDataParam("jsonData") String jsonData,
                     @FormDataParam("file") InputStream fileInputStream,
                     @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
@@ -2346,18 +2513,11 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
         }
 
         // -------------------------------------
-        // (1) Get the user from the API key
+        // (1) Get the user from the ContainerRequestContext
         // -------------------------------------
         User authUser;
-        try {
-            authUser = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN,
-                    BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-            );
-        }
-        
-        
+        authUser = getRequestUser(crc);
+
         // -------------------------------------
         // (2) Get the Dataset Id
         //  
@@ -2494,7 +2654,7 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
                 } else {
                     return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder());
                 }
-                
+
                 //"Look at that!  You added a file! (hey hey, it may have worked)");
             } catch (NoFilesException ex) {
                 Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex);
@@ -2513,17 +2673,11 @@ public Response addFileToDataset(@PathParam("id") String idSupplied,
      * @return
      */
     @GET
+    @AuthRequired
     @Path("{id}/cleanStorage")
-    public Response cleanStorage(@PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
+    public Response cleanStorage(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @QueryParam("dryrun") Boolean dryrun) {
         // get user and dataset
-        User authUser;
-        try {
-            authUser = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN,
-                    BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-            );
-        }
+        User authUser = getRequestUser(crc);
 
         Dataset dataset;
         try {
@@ -2595,11 +2749,11 @@ private void msgt(String m) {
     public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
             throws WrappedResponse {
         switch (versionId) {
-            case ":latest":
+            case DS_VERSION_LATEST:
                 return hdl.handleLatest();
-            case ":draft":
+            case DS_VERSION_DRAFT:
                 return hdl.handleDraft();
-            case ":latest-published":
+            case DS_VERSION_LATEST_PUBLISHED:
                 return hdl.handleLatestPublished();
             default:
                 try {
@@ -2619,11 +2773,15 @@ public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl)
     }
 
     private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse {
+        return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, false);
+    }
+
+    private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse {
         DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler<Command<DatasetVersion>>() {
 
             @Override
             public Command<DatasetVersion> handleLatest() {
-                return new GetLatestAccessibleDatasetVersionCommand(req, ds);
+                return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned);
             }
 
             @Override
@@ -2633,12 +2791,12 @@ public Command<DatasetVersion> handleDraft() {
 
             @Override
             public Command<DatasetVersion> handleSpecific(long major, long minor) {
-                return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor);
+                return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned);
             }
 
             @Override
             public Command<DatasetVersion> handleLatestPublished() {
-                return new GetLatestPublishedDatasetVersionCommand(req, ds);
+                return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned);
             }
         }));
         if (dsv == null || dsv.getId() == null) {
@@ -2679,12 +2837,13 @@ public Response getLocksForDataset(@PathParam("identifier") String id, @QueryPar
     }
 
     @DELETE
+    @AuthRequired
     @Path("{identifier}/locks")
-    public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
+    public Response deleteLocks(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
 
         return response(req -> {
             try {
-                AuthenticatedUser user = findAuthenticatedUserOrDie();
+                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
                 if (!user.isSuperuser()) {
                     return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
                 }
@@ -2703,14 +2862,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ
                         }
                         // kick of dataset reindexing, in case the locks removed 
                         // affected the search card:
-                        try {
-                            indexService.indexDataset(dataset, true);
-                        } catch (IOException | SolrServerException e) {
-                            String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-                            failureLogText += "\r\n" + e.getLocalizedMessage();
-                            LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
-
-                        }
+                        indexService.asyncIndexDataset(dataset, true);
                         return ok("locks removed");
                     }
                     return ok("dataset not locked");
@@ -2723,14 +2875,7 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ
                     dataset = findDatasetOrDie(id);
                     // ... and kick of dataset reindexing, in case the lock removed 
                     // affected the search card:
-                    try {
-                        indexService.indexDataset(dataset, true);
-                    } catch (IOException | SolrServerException e) {
-                        String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-                        failureLogText += "\r\n" + e.getLocalizedMessage();
-                        LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
-
-                    }
+                    indexService.asyncIndexDataset(dataset, true);
                     return ok("lock type " + lock.getReason() + " removed");
                 }
                 return ok("no lock type " + lockType + " on the dataset");
@@ -2738,16 +2883,17 @@ public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("typ
                 return wr.getResponse();
             }
 
-        });
+        }, getRequestUser(crc));
 
     }
     
     @POST
+    @AuthRequired
     @Path("{identifier}/lock/{type}")
-    public Response lockDataset(@PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
+    public Response lockDataset(@Context ContainerRequestContext crc, @PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
         return response(req -> {
             try {
-                AuthenticatedUser user = findAuthenticatedUserOrDie();
+                AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
                 if (!user.isSuperuser()) {
                     return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
                 }
@@ -2761,26 +2907,20 @@ public Response lockDataset(@PathParam("identifier") String id, @PathParam("type
                 // refresh the dataset:
                 dataset = findDatasetOrDie(id);
                 // ... and kick of dataset reindexing:
-                try {
-                    indexService.indexDataset(dataset, true);
-                } catch (IOException | SolrServerException e) {
-                    String failureLogText = "Post add lock indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-                    failureLogText += "\r\n" + e.getLocalizedMessage();
-                    LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
-
-                }
+                indexService.asyncIndexDataset(dataset, true);
 
                 return ok("dataset locked with lock type " + lockType);
             } catch (WrappedResponse wr) {
                 return wr.getResponse();
             }
 
-        });
+        }, getRequestUser(crc));
     }
     
     @GET
+    @AuthRequired
     @Path("locks")
-    public Response listLocks(@QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {        
+    public Response listLocks(@Context ContainerRequestContext crc, @QueryParam("type") String lockType, @QueryParam("userIdentifier") String userIdentifier) { //DatasetLock.Reason lockType) {
         // This API is here, under /datasets, and not under /admin, because we
         // likely want it to be accessible to admin users who may not necessarily 
         // have localhost access, that would be required to get to /api/admin in 
@@ -2788,7 +2928,7 @@ public Response listLocks(@QueryParam("type") String lockType, @QueryParam("user
         // this api to admin users only.
         AuthenticatedUser apiUser;
         try {
-            apiUser = findAuthenticatedUserOrDie();
+            apiUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
         }
@@ -2869,23 +3009,58 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup
         String nullCurrentMonth = null;
         return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country);
     }
-    
+
     @GET
+    @AuthRequired
     @Path("{identifier}/storagesize")
-    public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached,
-                                   @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
-
+    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) {
         return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"),
-                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))));
+                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc));
     }
-    
+
     @GET
+    @AuthRequired
     @Path("{identifier}/versions/{versionId}/downloadsize")
-    public Response getDownloadSize(@PathParam("identifier") String dvIdtf, @PathParam("versionId") String version,
-                                    @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+    public Response getDownloadSize(@Context ContainerRequestContext crc,
+                                    @PathParam("identifier") String dvIdtf,
+                                    @PathParam("versionId") String version,
+                                    @QueryParam("contentType") String contentType,
+                                    @QueryParam("accessStatus") String accessStatus,
+                                    @QueryParam("categoryName") String categoryName,
+                                    @QueryParam("tabularTagName") String tabularTagName,
+                                    @QueryParam("searchText") String searchText,
+                                    @QueryParam("mode") String mode,
+                                    @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
+                                    @Context UriInfo uriInfo,
+                                    @Context HttpHeaders headers) {
 
-        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"),
-                execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))))));
+        return response(req -> {
+            FileSearchCriteria fileSearchCriteria;
+            try {
+                fileSearchCriteria = new FileSearchCriteria(
+                        contentType,
+                        accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null,
+                        categoryName,
+                        tabularTagName,
+                        searchText
+                );
+            } catch (IllegalArgumentException e) {
+                return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus)));
+            }
+            DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode;
+            try {
+                fileDownloadSizeMode = mode != null ? DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All;
+            } catch (IllegalArgumentException e) {
+                return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode);
+            }
+            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned);
+            long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode);
+            String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize);
+            JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+            jsonObjectBuilder.add("message", message);
+            jsonObjectBuilder.add("storageSize", datasetStorageSize);
+            return ok(jsonObjectBuilder);
+        }, getRequestUser(crc));
     }
 
     @GET
@@ -2998,8 +3173,9 @@ public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @Path
     }
     
     @GET
+    @AuthRequired
     @Path("{identifier}/storageDriver")
-    public Response getFileStore(@PathParam("identifier") String dvIdtf,
+    public Response getFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
             @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { 
         
         Dataset dataset; 
@@ -3010,19 +3186,20 @@ public Response getFileStore(@PathParam("identifier") String dvIdtf,
             return error(Response.Status.NOT_FOUND, "No such dataset");
         }
         
-        return response(req -> ok(dataset.getEffectiveStorageDriverId()));
+        return response(req -> ok(dataset.getEffectiveStorageDriverId()), getRequestUser(crc));
     }
     
     @PUT
+    @AuthRequired
     @Path("{identifier}/storageDriver")
-    public Response setFileStore(@PathParam("identifier") String dvIdtf,
+    public Response setFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
             String storageDriverLabel,
             @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
         
         // Superuser-only:
         AuthenticatedUser user;
         try {
-            user = findAuthenticatedUserOrDie();
+            user = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.BAD_REQUEST, "Authentication is required.");
         }
@@ -3051,14 +3228,15 @@ public Response setFileStore(@PathParam("identifier") String dvIdtf,
     }
     
     @DELETE
+    @AuthRequired
     @Path("{identifier}/storageDriver")
-    public Response resetFileStore(@PathParam("identifier") String dvIdtf,
+    public Response resetFileStore(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
             @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
         
         // Superuser-only:
         AuthenticatedUser user;
         try {
-            user = findAuthenticatedUserOrDie();
+            user = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.BAD_REQUEST, "Authentication is required.");
         }
@@ -3080,12 +3258,13 @@ public Response resetFileStore(@PathParam("identifier") String dvIdtf,
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/curationLabelSet")
-    public Response getCurationLabelSet(@PathParam("identifier") String dvIdtf,
+    public Response getCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
             @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
 
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -3101,19 +3280,22 @@ public Response getCurationLabelSet(@PathParam("identifier") String dvIdtf,
             return ex.getResponse();
         }
 
-        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()));
+        return response(req -> ok(dataset.getEffectiveCurationLabelSetName()), getRequestUser(crc));
     }
 
     @PUT
+    @AuthRequired
     @Path("{identifier}/curationLabelSet")
-    public Response setCurationLabelSet(@PathParam("identifier") String dvIdtf,
-            @QueryParam("name") String curationLabelSet,
-            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+    public Response setCurationLabelSet(@Context ContainerRequestContext crc,
+                                        @PathParam("identifier") String dvIdtf,
+                                        @QueryParam("name") String curationLabelSet,
+                                        @Context UriInfo uriInfo,
+                                        @Context HttpHeaders headers) throws WrappedResponse {
 
         // Superuser-only:
         AuthenticatedUser user;
         try {
-            user = findAuthenticatedUserOrDie();
+            user = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.UNAUTHORIZED, "Authentication is required.");
         }
@@ -3146,14 +3328,15 @@ public Response setCurationLabelSet(@PathParam("identifier") String dvIdtf,
     }
 
     @DELETE
+    @AuthRequired
     @Path("{identifier}/curationLabelSet")
-    public Response resetCurationLabelSet(@PathParam("identifier") String dvIdtf,
+    public Response resetCurationLabelSet(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
             @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
 
         // Superuser-only:
         AuthenticatedUser user;
         try {
-            user = findAuthenticatedUserOrDie();
+            user = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.BAD_REQUEST, "Authentication is required.");
         }
@@ -3175,12 +3358,15 @@ public Response resetCurationLabelSet(@PathParam("identifier") String dvIdtf,
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/allowedCurationLabels")
-    public Response getAllowedCurationLabels(@PathParam("identifier") String dvIdtf,
-            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+    public Response getAllowedCurationLabels(@Context ContainerRequestContext crc,
+                                             @PathParam("identifier") String dvIdtf,
+                                             @Context UriInfo uriInfo,
+                                             @Context HttpHeaders headers) throws WrappedResponse {
         AuthenticatedUser user = null;
         try {
-            user = findAuthenticatedUserOrDie();
+            user = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -3194,22 +3380,23 @@ public Response getAllowedCurationLabels(@PathParam("identifier") String dvIdtf,
         }
         if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) {
             String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName());
-            return response(req -> ok(String.join(",", labelArray)));
+            return response(req -> ok(String.join(",", labelArray)), getRequestUser(crc));
         } else {
             return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset.");
         }
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/timestamps")
     @Produces(MediaType.APPLICATION_JSON)
-    public Response getTimestamps(@PathParam("identifier") String id) {
+    public Response getTimestamps(@Context ContainerRequestContext crc, @PathParam("identifier") String id) {
 
         Dataset dataset = null;
         DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
         try {
             dataset = findDatasetOrDie(id);
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Set<Permission> perms = new HashSet<Permission>();
             perms.add(Permission.ViewUnpublishedDataset);
             boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms);
@@ -3274,13 +3461,246 @@ public Response getTimestamps(@PathParam("identifier") String id) {
     }
 
 
+/****************************
+ * Globus Support Section:
+ * 
+ * Globus transfer in (upload) and out (download) involve three basic steps: The
+ * app is launched and makes a callback to the
+ * globusUploadParameters/globusDownloadParameters methods to get all of the info
+ * needed to set up its display.
+ * 
+ * At some point after that, the user will make a selection as to which files to
+ * transfer and the app will call requestGlobusUploadPaths/requestGlobusDownload
+ * to indicate a transfer is about to start. In addition to providing the
+ * details of where to transfer the files to/from, Dataverse also grants the
+ * Globus principal involved the relevant rw or r permission for the dataset.
+ * 
+ * Once the transfer is started, the app records the task id and sends it to
+ * Dataverse in the addGlobusFiles/monitorGlobusDownload call. Dataverse then
+ * monitors the transfer task and, when it ultimately succeeds or fails,
+ * revokes the principal's permission and, in the transfer-in (upload) case, adds the
+ * files to the dataset. (The dataset is locked until the transfer completes.)
+ * 
+ * (If no transfer is started within a specified timeout, permissions will
+ * automatically be revoked - see the GlobusServiceBean for details.)
+ *
+ * The option to reference a file at a remote endpoint (rather than transfer it)
+ * follows the first two steps of the process above but completes with a call to
+ * the normal /addFiles endpoint (as there is no transfer to monitor and the
+ * files can be added to the dataset immediately). An illustrative call sequence
+ * follows this comment.
+ */
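+
+/*
+ * Illustrative upload sequence (a sketch, not a normative client; the endpoint
+ * paths below match the @Path values and URL templates defined in this section,
+ * while the dataset id, Globus principal, and file counts are hypothetical):
+ *
+ *   1. GET  /api/v1/datasets/{id}/globusUploadParameters?locale=en
+ *   2. POST /api/v1/datasets/{id}/requestGlobusUploadPaths
+ *           with body {"principal": "<globus-principal-id>", "numberOfFiles": 2}
+ *   3. POST /api/v1/datasets/{id}/addGlobusFiles
+ *           (multipart form; the "jsonData" part includes the Globus "taskIdentifier"
+ *           of the transfer started by the app)
+ *
+ * The download flow is analogous, using globusDownloadParameters,
+ * requestGlobusDownload and monitorGlobusDownload.
+ */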
+
+    /**
+     * Retrieve the parameters and signed URLs required to perform a globus
+     * transfer. This API endpoint is expected to be called as a signed callback
+     * after the dataverse-globus app (or another app) is launched, but it will accept
+     * other forms of authentication.
+     * 
+     * @param crc
+     * @param datasetId
+     */
+    @GET
+    @AuthRequired
+    @Path("{id}/globusUploadParameters")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response getGlobusUploadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
+            @QueryParam(value = "locale") String locale) {
+        // -------------------------------------
+        // (1) Get the user from the ContainerRequestContext
+        // -------------------------------------
+        AuthenticatedUser authUser;
+        try {
+            authUser = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
+        // -------------------------------------
+        // (2) Get the Dataset Id
+        // -------------------------------------
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(datasetId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        String storeId = dataset.getEffectiveStorageDriverId();
+        // acceptsGlobusTransfers should only be true for an S3 or globus store
+        if (!GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
+                && !GlobusAccessibleStore.allowsGlobusReferences(storeId)) {
+            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusuploaddisabled"));
+        }
+
+        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
+
+        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
+        String transferEndpoint = null;
+        JsonArray referenceEndpointsWithPaths = null;
+        if (managed) {
+            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
+        } else {
+            referenceEndpointsWithPaths = GlobusAccessibleStore.getReferenceEndpointsWithPaths(storeId);
+        }
+
+        JsonObjectBuilder queryParams = Json.createObjectBuilder();
+        queryParams.add("queryParameters",
+                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
+                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
+                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
+                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
+                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
+        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
+        JsonObjectBuilder params = Json.createObjectBuilder();
+        substitutedParams.keySet().forEach((key) -> {
+            params.add(key, substitutedParams.get(key));
+        });
+        params.add("managed", Boolean.toString(managed));
+        if (transferEndpoint != null) {
+            params.add("endpoint", transferEndpoint);
+        } else {
+            params.add("referenceEndpointsWithPaths", referenceEndpointsWithPaths);
+        }
+        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
+        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
+        String requestCallName = managed ? "requestGlobusTransferPaths" : "requestGlobusReferencePaths";
+        allowedApiCalls.add(
+                Json.createObjectBuilder().add(URLTokenUtil.NAME, requestCallName).add(URLTokenUtil.HTTP_METHOD, "POST")
+                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/requestGlobusUploadPaths")
+                        .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
+        if (managed) {
+            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addGlobusFiles")
+                    .add(URLTokenUtil.HTTP_METHOD, "POST")
+                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addGlobusFiles")
+                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
+        } else {
+            allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "addFiles")
+                    .add(URLTokenUtil.HTTP_METHOD, "POST")
+                    .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/addFiles")
+                    .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
+        }
+        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
+                .add(URLTokenUtil.HTTP_METHOD, "GET")
+                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
+                .add(URLTokenUtil.TIMEOUT, 5));
+        allowedApiCalls.add(
+                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
+                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
+                        .add(URLTokenUtil.TIMEOUT, 5));
+
+        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
+    }
+
+    /**
+     * Provides specific storageIdentifiers to use for each file and requests permissions for a given Globus user to upload to the dataset
+     * 
+     * @param crc
+     * @param datasetId
+     * @param jsonBody - an object that must include the id of the Globus "principal" involved and the "numberOfFiles" that will be transferred.
+     * @return
+     * @throws IOException
+     * @throws ExecutionException
+     * @throws InterruptedException
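+     *
+     * Illustrative request bodies (sketches; the key names match what this method reads
+     * from jsonBody below, while the values are hypothetical):
+     * managed store:   {@code {"principal": "<globus-principal-id>", "numberOfFiles": 2}}
+     * reference store: {@code {"referencedFiles": ["<remote file reference>"]}}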
+     */
+    @POST
+    @AuthRequired
+    @Path("{id}/requestGlobusUploadPaths")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
+            String jsonBody) throws IOException, ExecutionException, InterruptedException {
+
+        logger.info(" ====  (api allowGlobusUpload) jsonBody   ====== " + jsonBody);
+
+        if (!systemConfig.isGlobusUpload()) {
+            return error(Response.Status.SERVICE_UNAVAILABLE,
+                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
+        }
+
+        // -------------------------------------
+        // (1) Get the user from the ContainerRequestContext
+        // -------------------------------------
+        AuthenticatedUser authUser;
+        try {
+            authUser = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
+
+        // -------------------------------------
+        // (2) Get the Dataset Id
+        // -------------------------------------
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(datasetId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        if (permissionSvc.requestOn(createDataverseRequest(authUser), dataset)
+                .canIssue(UpdateDatasetVersionCommand.class)) {
+
+            JsonObject params = JsonUtil.getJsonObject(jsonBody);
+            if (!GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
+                try {
+                    JsonArray referencedFiles = params.getJsonArray("referencedFiles");
+                    if (referencedFiles == null || referencedFiles.size() == 0) {
+                        return badRequest("No referencedFiles specified");
+                    }
+                    JsonObject fileMap = globusService.requestReferenceFileIdentifiers(dataset, referencedFiles);
+                    return (ok(fileMap));
+                } catch (Exception e) {
+                    return badRequest(e.getLocalizedMessage());
+                }
+            } else {
+                try {
+                    String principal = params.getString("principal");
+                    int numberOfPaths = params.getInt("numberOfFiles");
+                    if (numberOfPaths <= 0) {
+                        return badRequest("numberOfFiles must be positive");
+                    }
+
+                    JsonObject response = globusService.requestAccessiblePaths(principal, dataset, numberOfPaths);
+                    switch (response.getInt("status")) {
+                    case 201:
+                        return ok(response.getJsonObject("paths"));
+                    case 400:
+                        return badRequest("Unable to grant permission");
+                    case 409:
+                        return conflict("Permission already exists");
+                    default:
+                        return error(null, "Unexpected error when granting permission");
+                    }
+
+                } catch (NullPointerException | ClassCastException e) {
+                    return badRequest("Error retrieving principal and numberOfFiles from JSON request body");
+
+                }
+            }
+        } else {
+            return forbidden("User doesn't have permission to upload to this dataset");
+        }
+
+    }
+
+    /** A method analogous to /addFiles; the request must also include the taskIdentifier of the in-progress Globus transfer to monitor
+     * 
+     * @param crc
+     * @param datasetId
+     * @param jsonData - see the /addFiles documentation; an additional "taskIdentifier" key in the main object is required.
+     * @param uriInfo
+     * @return
+     * @throws IOException
+     * @throws ExecutionException
+     * @throws InterruptedException
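+     *
+     * Illustrative jsonData form part (a sketch; only the "taskIdentifier" key is shown
+     * with a hypothetical value, the remaining fields follow the /addFiles format):
+     * {@code {"taskIdentifier": "<globus-task-id>", ...}}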
+     */
     @POST
-    @Path("{id}/addglobusFiles")
+    @AuthRequired
+    @Path("{id}/addGlobusFiles")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
-    public Response addGlobusFilesToDataset(@PathParam("id") String datasetId,
+    public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
+                                            @PathParam("id") String datasetId,
                                             @FormDataParam("jsonData") String jsonData,
-                                            @Context UriInfo uriInfo,
-                                            @Context HttpHeaders headers
+                                            @Context UriInfo uriInfo
     ) throws IOException, ExecutionException, InterruptedException {
 
         logger.info(" ====  (api addGlobusFilesToDataset) jsonData   ====== " + jsonData);
@@ -3294,7 +3714,7 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId,
         // -------------------------------------
         AuthenticatedUser authUser;
         try {
-            authUser = findAuthenticatedUserOrDie();
+            authUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse ex) {
             return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
             );
@@ -3310,6 +3730,15 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId,
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
+        
+        JsonObject jsonObject = null;
+        try {
+            jsonObject = JsonUtil.getJsonObject(jsonData);
+        } catch (Exception ex) {
+            logger.fine("Error parsing json: " + jsonData + " " + ex.getMessage());
+            return badRequest("Error parsing json body");
+
+        }
 
         //------------------------------------
         // (2b) Make sure dataset does not have package file
@@ -3340,43 +3769,286 @@ public Response addGlobusFilesToDataset(@PathParam("id") String datasetId,
         }
 
 
-        String requestUrl = headers.getRequestHeader("origin").get(0);
+        String requestUrl = SystemConfig.getDataverseSiteUrlStatic();
+        
+        // Async Call
+        globusService.globusUpload(jsonObject, token, dataset, requestUrl, authUser);
+
+        return ok("Async call to Globus Upload started ");
+
+    }
+    
+    /**
+     * Retrieve the parameters and signed URLs required to perform a Globus
+     * transfer/download. This API endpoint is expected to be called as a signed
+     * callback after the dataverse-globus app (or another app) is launched, but it
+     * will accept other forms of authentication.
+     *
+     * @param crc
+     * @param datasetId
+     * @param locale
+     * @param downloadId - the id of a cached object listing the files involved. This is generated by Dataverse and provided to the dataverse-globus app in a signed URL.
+     * @return - JSON containing the parameters and URLs needed by the dataverse-globus app. The format is analogous to that for external tools.
+     */
+    @GET
+    @AuthRequired
+    @Path("{id}/globusDownloadParameters")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response getGlobusDownloadParams(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
+            @QueryParam(value = "locale") String locale, @QueryParam(value = "downloadId") String downloadId) {
+        // -------------------------------------
+        // (1) Get the user from the ContainerRequestContext
+        // -------------------------------------
+        AuthenticatedUser authUser;
+        try {
+            authUser = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
+        // -------------------------------------
+        // (2) Get the Dataset Id
+        // -------------------------------------
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(datasetId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        String storeId = dataset.getEffectiveStorageDriverId();
+        // acceptsGlobusTransfers should only be true for an S3 or globus store
+        if (!(GlobusAccessibleStore.acceptsGlobusTransfers(storeId)
+                || GlobusAccessibleStore.allowsGlobusReferences(storeId))) {
+            return badRequest(BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
+        }
 
-        if(requestUrl.contains("localhost")){
-            requestUrl = "http://localhost:8080";
+        JsonObject files = globusService.getFilesForDownload(downloadId);
+        if (files == null) {
+            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
         }
 
-        // Async Call
-        globusService.globusUpload(jsonData, token, dataset, requestUrl, authUser);
+        URLTokenUtil tokenUtil = new URLTokenUtil(dataset, authSvc.findApiTokenByUser(authUser), locale);
 
-        return ok("Async call to Globus Upload started ");
+        boolean managed = GlobusAccessibleStore.isDataverseManaged(storeId);
+        String transferEndpoint = null;
 
+        JsonObjectBuilder queryParams = Json.createObjectBuilder();
+        queryParams.add("queryParameters",
+                Json.createArrayBuilder().add(Json.createObjectBuilder().add("datasetId", "{datasetId}"))
+                        .add(Json.createObjectBuilder().add("siteUrl", "{siteUrl}"))
+                        .add(Json.createObjectBuilder().add("datasetVersion", "{datasetVersion}"))
+                        .add(Json.createObjectBuilder().add("dvLocale", "{localeCode}"))
+                        .add(Json.createObjectBuilder().add("datasetPid", "{datasetPid}")));
+        JsonObject substitutedParams = tokenUtil.getParams(queryParams.build());
+        JsonObjectBuilder params = Json.createObjectBuilder();
+        substitutedParams.keySet().forEach((key) -> {
+            params.add(key, substitutedParams.get(key));
+        });
+        params.add("managed", Boolean.toString(managed));
+        if (managed) {
+            transferEndpoint = GlobusAccessibleStore.getTransferEndpointId(storeId);
+            params.add("endpoint", transferEndpoint);
+        }
+        params.add("files", files);
+        int timeoutSeconds = JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class);
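+        // API calls the dataverse-globus app is allowed to make via signed URLs, each with a
+        // timeout taken from the Globus cache max-age setting or a short fixed value.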
+        JsonArrayBuilder allowedApiCalls = Json.createArrayBuilder();
+        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "monitorGlobusDownload")
+                .add(URLTokenUtil.HTTP_METHOD, "POST")
+                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/monitorGlobusDownload")
+                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
+        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "requestGlobusDownload")
+                .add(URLTokenUtil.HTTP_METHOD, "POST")
+                .add(URLTokenUtil.URL_TEMPLATE,
+                        "/api/v1/datasets/{datasetId}/requestGlobusDownload?downloadId=" + downloadId)
+                .add(URLTokenUtil.TIMEOUT, timeoutSeconds));
+        allowedApiCalls.add(Json.createObjectBuilder().add(URLTokenUtil.NAME, "getDatasetMetadata")
+                .add(URLTokenUtil.HTTP_METHOD, "GET")
+                .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}")
+                .add(URLTokenUtil.TIMEOUT, 5));
+        allowedApiCalls.add(
+                Json.createObjectBuilder().add(URLTokenUtil.NAME, "getFileListing").add(URLTokenUtil.HTTP_METHOD, "GET")
+                        .add(URLTokenUtil.URL_TEMPLATE, "/api/v1/datasets/{datasetId}/versions/{datasetVersion}/files")
+                        .add(URLTokenUtil.TIMEOUT, 5));
+
+        return ok(tokenUtil.createPostBody(params.build(), allowedApiCalls.build()));
     }
 
+    /**
+     * Requests permissions for a given Globus user to download the specified files
+     * in the dataset and returns information about the paths to transfer from.
+     *
+     * When called directly rather than in response to being given a downloadId, the request body can include a "fileIds" key with an array of file ids to transfer.
+     * 
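+     * Example body (illustrative): {"principal":"<globus user id>", "fileIds":[123, 456]}
+     *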
+     * @param crc
+     * @param datasetId
+     * @param jsonBody - a JSON object that must include the id of the Globus "principal" that will be transferring the files in the case where Dataverse manages the Globus endpoint. For remote endpoints, the principal is not required.
+     * @return - a JSON object containing a map of file ids to Globus endpoint/path
+     * @throws IOException
+     * @throws ExecutionException
+     * @throws InterruptedException
+     */
     @POST
-    @Path("{id}/deleteglobusRule")
-    @Consumes(MediaType.MULTIPART_FORM_DATA)
-    public Response deleteglobusRule(@PathParam("id") String datasetId,@FormDataParam("jsonData") String jsonData
-    ) throws IOException, ExecutionException, InterruptedException {
+    @AuthRequired
+    @Path("{id}/requestGlobusDownload")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response requestGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
+            @QueryParam(value = "downloadId") String downloadId, String jsonBody)
+            throws IOException, ExecutionException, InterruptedException {
 
+        logger.info(" ====  (api allowGlobusDownload) jsonBody   ====== " + jsonBody);
 
-        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
+        if (!systemConfig.isGlobusDownload()) {
+            return error(Response.Status.SERVICE_UNAVAILABLE,
+                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
+        }
 
+        // -------------------------------------
+        // (1) Get the user from the ContainerRequestContext
+        // -------------------------------------
+        User user = getRequestUser(crc);
 
-        if (!systemConfig.isHTTPUpload()) {
-            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
+        // -------------------------------------
+        // (2) Get the Dataset Id
+        // -------------------------------------
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(datasetId);
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+        JsonObject body = null;
+        if (jsonBody != null) {
+            body = JsonUtil.getJsonObject(jsonBody);
+        }
+        Set<String> fileIds = null;
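+        // The file ids come either from the cached object referenced by downloadId or from a
+        // "fileIds" array (numeric ids or PIDs) in the request body.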
+        if (downloadId != null) {
+            JsonObject files = globusService.getFilesForDownload(downloadId);
+            if (files != null) {
+                fileIds = files.keySet();
+            }
+        } else {
+            if ((body != null) && body.containsKey("fileIds")) {
+                Collection<JsonValue> fileVals = body.getJsonArray("fileIds").getValuesAs(JsonValue.class);
+                fileIds = new HashSet<String>(fileVals.size());
+                for (JsonValue fileVal : fileVals) {
+                    String id = null;
+                    switch (fileVal.getValueType()) {
+                    case STRING:
+                        id = ((JsonString) fileVal).getString();
+                        break;
+                    case NUMBER:
+                        id = ((JsonNumber) fileVal).toString();
+                        break;
+                    default:
+                        return badRequest("fileIds must be numeric or string (ids/PIDs)");
+                    }
+                    fileIds.add(id);
+                }
+            } else {
+                return badRequest("fileIds JsonArray of file ids/PIDs required in POST body");
+            }
+        }
+
+        if (fileIds.isEmpty()) {
+            return notFound(BundleUtil.getStringFromBundle("datasets.api.globusdownloadnotfound"));
+        }
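+        // Verify that every requested file belongs to this dataset and that the user may download it
+        // (unpublished files need ViewUnpublishedDataset; restricted/embargoed files need DownloadFile).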
+        ArrayList<DataFile> dataFiles = new ArrayList<DataFile>(fileIds.size());
+        for (String id : fileIds) {
+            boolean published = false;
+            logger.info("File id: " + id);
+
+            DataFile df = null;
+            try {
+                df = findDataFileOrDie(id);
+            } catch (WrappedResponse wr) {
+                return wr.getResponse();
+            }
+            if (!df.getOwner().equals(dataset)) {
+                return badRequest("All files must be in the dataset");
+            }
+            dataFiles.add(df);
+
+            for (FileMetadata fm : df.getFileMetadatas()) {
+                if (fm.getDatasetVersion().isPublished()) {
+                    published = true;
+                    break;
+                }
+            }
+
+            if (!published) {
+                // If the file is not published, the user can still download it, provided
+                // they have permission to view unpublished versions:
+
+                if (!permissionService.hasPermissionsFor(user, df.getOwner(),
+                        EnumSet.of(Permission.ViewUnpublishedDataset))) {
+                    return forbidden("User doesn't have permission to download file: " + id);
+                }
+            } else { // published; check whether it is restricted and/or embargoed
+                if (df.isRestricted() || FileUtil.isActivelyEmbargoed(df))
+                    // This line also handles all three authenticated session user, token user, and
+                    // guest cases.
+                    if (!permissionService.hasPermissionsFor(user, df, EnumSet.of(Permission.DownloadFile))) {
+                        return forbidden("User doesn't have permission to download file: " + id);
+                    }
+
+            }
+        }
+        // Allowed to download all requested files
+        JsonObject files = GlobusUtil.getFilesMap(dataFiles, dataset);
+        if (GlobusAccessibleStore.isDataverseManaged(dataset.getEffectiveStorageDriverId())) {
+            // If managed, give the principal read permissions
+            int status = globusService.setPermissionForDownload(dataset, body.getString("principal"));
+            switch (status) {
+            case 201:
+                return ok(files);
+            case 400:
+                return badRequest("Unable to grant permission");
+            case 409:
+                return conflict("Permission already exists");
+            default:
+                return error(null, "Unexpected error when granting permission");
+            }
+
+        }
+
+        return ok(files);
+    }
+
+    /**
+     * Monitors a Globus download and removes permissions on the directory/dataset when
+     * the specified transfer task is completed.
+     * 
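+     * Example body (illustrative): {"taskIdentifier":"<globus task id>"}
+     *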
+     * @param crc
+     * @param datasetId
+     * @param jsonData  - a JSON Object containing the key "taskIdentifier" with the
+     *                  Globus task to monitor.
+     * @return
+     * @throws IOException
+     * @throws ExecutionException
+     * @throws InterruptedException
+     */
+    @POST
+    @AuthRequired
+    @Path("{id}/monitorGlobusDownload")
+    @Consumes(MediaType.APPLICATION_JSON)
+    public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @PathParam("id") String datasetId,
+            String jsonData) throws IOException, ExecutionException, InterruptedException {
+
+        logger.info(" ====  (api deleteglobusRule) jsonData   ====== " + jsonData);
+
+        if (!systemConfig.isGlobusDownload()) {
+            return error(Response.Status.SERVICE_UNAVAILABLE,
+                    BundleUtil.getStringFromBundle("datasets.api.globusdownloaddisabled"));
         }
 
         // -------------------------------------
-        // (1) Get the user from the API key
+        // (1) Get the user from the ContainerRequestContext
         // -------------------------------------
         User authUser;
-        try {
-            authUser = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-            );
-        }
+        authUser = getRequestUser(crc);
 
         // -------------------------------------
         // (2) Get the Dataset Id
@@ -3396,7 +4068,6 @@ public Response deleteglobusRule(@PathParam("id") String datasetId,@FormDataPara
 
     }
 
-
     /**
      * Add multiple Files to an existing Dataset
      *
@@ -3405,25 +4076,21 @@ public Response deleteglobusRule(@PathParam("id") String datasetId,@FormDataPara
      * @return
      */
     @POST
+    @AuthRequired
     @Path("{id}/addFiles")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
-    public Response addFilesToDataset(@PathParam("id") String idSupplied,
-                                      @FormDataParam("jsonData") String jsonData) {
+    public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
+            @FormDataParam("jsonData") String jsonData) {
 
         if (!systemConfig.isHTTPUpload()) {
             return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
         }
 
         // -------------------------------------
-        // (1) Get the user from the API key
+        // (1) Get the user from the ContainerRequestContext
         // -------------------------------------
         User authUser;
-        try {
-            authUser = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-            );
-        }
+        authUser = getRequestUser(crc);
 
         // -------------------------------------
         // (2) Get the Dataset Id
@@ -3476,25 +4143,22 @@ public Response addFilesToDataset(@PathParam("id") String idSupplied,
      * @return
      */
     @POST
+    @AuthRequired
     @Path("{id}/replaceFiles")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
-    public Response replaceFilesInDataset(@PathParam("id") String idSupplied,
-                                      @FormDataParam("jsonData") String jsonData) {
+    public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
+                                          @PathParam("id") String idSupplied,
+                                          @FormDataParam("jsonData") String jsonData) {
 
         if (!systemConfig.isHTTPUpload()) {
             return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
         }
 
         // -------------------------------------
-        // (1) Get the user from the API key
+        // (1) Get the user from the ContainerRequestContext
         // -------------------------------------
         User authUser;
-        try {
-            authUser = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-            );
-        }
+        authUser = getRequestUser(crc);
 
         // -------------------------------------
         // (2) Get the Dataset Id
@@ -3546,12 +4210,13 @@ public Response replaceFilesInDataset(@PathParam("id") String idSupplied,
      * @throws WrappedResponse
      */
     @GET
+    @AuthRequired
     @Path("/listCurationStates")
     @Produces("text/csv")
-    public Response getCurationStates() throws WrappedResponse {
+    public Response getCurationStates(@Context ContainerRequestContext crc) throws WrappedResponse {
 
         try {
-            AuthenticatedUser user = findAuthenticatedUserOrDie();
+            AuthenticatedUser user = getRequestAuthenticatedUserOrDie(crc);
             if (!user.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -3576,7 +4241,7 @@ public Response getCurationStates() throws WrappedResponse {
                 BundleUtil.getStringFromBundle("datasets.api.modificationdate"),
                 BundleUtil.getStringFromBundle("datasets.api.curationstatus"),
                 String.join(",", assignees.keySet())));
-        for (Dataset dataset : datasetSvc.findAllUnpublished()) {
+        for (Dataset dataset : datasetSvc.findAllWithDraftVersion()) {
             List<RoleAssignment> ras = permissionService.assignmentsOn(dataset);
             curationRoles.forEach(r -> {
                 assignees.put(r.getAlias(), new HashSet<String>());
@@ -3586,11 +4251,12 @@ public Response getCurationStates() throws WrappedResponse {
                     assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier());
                 }
             }
+            DatasetVersion dsv = dataset.getLatestVersion();
             String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\"";
-            String status = dataset.getLatestVersion().getExternalStatusLabel();
+            String status = dsv.getExternalStatusLabel();
             String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString();
-            String date = new SimpleDateFormat("yyyy-MM-dd").format(dataset.getCreateDate());
-            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dataset.getModificationTime());
+            String date = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getCreateTime());
+            String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dsv.getLastUpdateTime());
             String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\"";
             List<String> sList = new ArrayList<String>();
             assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue())));
@@ -3603,13 +4269,17 @@ public Response getCurationStates() throws WrappedResponse {
     // APIs to manage archival status
 
     @GET
+    @AuthRequired
     @Produces(MediaType.APPLICATION_JSON)
     @Path("/{id}/{version}/archivalStatus")
-    public Response getDatasetVersionArchivalStatus(@PathParam("id") String datasetId,
-            @PathParam("version") String versionNumber, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+    public Response getDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
+                                                    @PathParam("id") String datasetId,
+                                                    @PathParam("version") String versionNumber,
+                                                    @Context UriInfo uriInfo,
+                                                    @Context HttpHeaders headers) {
 
         try {
-            AuthenticatedUser au = findAuthenticatedUserOrDie();
+            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
             if (!au.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -3629,15 +4299,19 @@ public Response getDatasetVersionArchivalStatus(@PathParam("id") String datasetI
     }
 
     @PUT
+    @AuthRequired
     @Consumes(MediaType.APPLICATION_JSON)
     @Path("/{id}/{version}/archivalStatus")
-    public Response setDatasetVersionArchivalStatus(@PathParam("id") String datasetId,
-            @PathParam("version") String versionNumber, String newStatus, @Context UriInfo uriInfo,
-            @Context HttpHeaders headers) {
+    public Response setDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
+                                                    @PathParam("id") String datasetId,
+                                                    @PathParam("version") String versionNumber,
+                                                    String newStatus,
+                                                    @Context UriInfo uriInfo,
+                                                    @Context HttpHeaders headers) {
 
         logger.fine(newStatus);
         try {
-            AuthenticatedUser au = findAuthenticatedUserOrDie();
+            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
 
             if (!au.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
@@ -3683,13 +4357,17 @@ public Response setDatasetVersionArchivalStatus(@PathParam("id") String datasetI
     }
     
     @DELETE
+    @AuthRequired
     @Produces(MediaType.APPLICATION_JSON)
     @Path("/{id}/{version}/archivalStatus")
-    public Response deleteDatasetVersionArchivalStatus(@PathParam("id") String datasetId,
-            @PathParam("version") String versionNumber, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+    public Response deleteDatasetVersionArchivalStatus(@Context ContainerRequestContext crc,
+                                                       @PathParam("id") String datasetId,
+                                                       @PathParam("version") String versionNumber,
+                                                       @Context UriInfo uriInfo,
+                                                       @Context HttpHeaders headers) {
 
         try {
-            AuthenticatedUser au = findAuthenticatedUserOrDie();
+            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
             if (!au.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "Superusers only.");
             }
@@ -3733,11 +4411,15 @@ private boolean isSingleVersionArchiving() {
     // This supports the cases where a tool is accessing a restricted resource (e.g.
     // for a draft dataset), or public case.
     @GET
+    @AuthRequired
     @Path("{id}/versions/{version}/toolparams/{tid}")
-    public Response getExternalToolDVParams(@PathParam("tid") long externalToolId,
-            @PathParam("id") String datasetId, @PathParam("version") String version, @QueryParam(value = "locale") String locale) {
+    public Response getExternalToolDVParams(@Context ContainerRequestContext crc,
+                                            @PathParam("tid") long externalToolId,
+                                            @PathParam("id") String datasetId,
+                                            @PathParam("version") String version,
+                                            @QueryParam(value = "locale") String locale) {
         try {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             DatasetVersion target = getDatasetVersionOrDie(req, version, findDatasetOrDie(datasetId), null, null);
             if (target == null) {
                 return error(BAD_REQUEST, "DatasetVersion not found.");
@@ -3751,16 +4433,209 @@ public Response getExternalToolDVParams(@PathParam("tid") long externalToolId,
                 return error(BAD_REQUEST, "External tool does not have dataset scope.");
             }
             ApiToken apiToken = null;
-            User u = findUserOrDie();
-            if (u instanceof AuthenticatedUser) {
-                apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) u);
+            User u = getRequestUser(crc);
+            apiToken = authSvc.getValidApiTokenForUser(u);
+
+            URLTokenUtil eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
+            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+    }
+
+    @GET
+    @Path("summaryFieldNames")
+    public Response getDatasetSummaryFieldNames() {
+        String customFieldNames = settingsService.getValueForKey(SettingsServiceBean.Key.CustomDatasetSummaryFields);
+        String[] fieldNames = DatasetUtil.getDatasetSummaryFieldNames(customFieldNames);
+        JsonArrayBuilder fieldNamesArrayBuilder = Json.createArrayBuilder();
+        for (String fieldName : fieldNames) {
+            fieldNamesArrayBuilder.add(fieldName);
+        }
+        return ok(fieldNamesArrayBuilder);
+    }
+
+    @GET
+    @Path("privateUrlDatasetVersion/{privateUrlToken}")
+    public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String privateUrlToken) {
+        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
+        if (privateUrlUser == null) {
+            return notFound("Private URL user not found");
+        }
+        boolean isAnonymizedAccess = privateUrlUser.hasAnonymizedAccess();
+        String anonymizedFieldTypeNames = settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
+        if(isAnonymizedAccess && anonymizedFieldTypeNames == null) {
+            throw new NotAcceptableException("Anonymized Access not enabled");
+        }
+        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
+        if (dsv == null || dsv.getId() == null) {
+            return notFound("Dataset version not found");
+        }
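+        // With anonymized access, the configured field types are suppressed in the version JSON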
+        JsonObjectBuilder responseJson;
+        if (isAnonymizedAccess) {
+            List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s")));
+            responseJson = json(dsv, anonymizedFieldTypeNamesList, true);
+        } else {
+            responseJson = json(dsv, true);
+        }
+        return ok(responseJson);
+    }
+
+    @GET
+    @Path("privateUrlDatasetVersion/{privateUrlToken}/citation")
+    public Response getPrivateUrlDatasetVersionCitation(@PathParam("privateUrlToken") String privateUrlToken) {
+        PrivateUrlUser privateUrlUser = privateUrlService.getPrivateUrlUserFromToken(privateUrlToken);
+        if (privateUrlUser == null) {
+            return notFound("Private URL user not found");
+        }
+        DatasetVersion dsv = privateUrlService.getDraftDatasetVersionFromToken(privateUrlToken);
+        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
+                : ok(dsv.getCitation(true, privateUrlUser.hasAnonymizedAccess()));
+    }
+
+    @GET
+    @AuthRequired
+    @Path("{id}/versions/{versionId}/citation")
+    public Response getDatasetVersionCitation(@Context ContainerRequestContext crc,
+                                              @PathParam("id") String datasetId,
+                                              @PathParam("versionId") String versionId,
+                                              @QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
+                                              @Context UriInfo uriInfo,
+                                              @Context HttpHeaders headers) {
+        return response(req -> ok(
+                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned).getCitation(true, false)), getRequestUser(crc));
+    }
+
+    @POST
+    @AuthRequired
+    @Path("{id}/versions/{versionId}/deaccession")
+    public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+        if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) {
+            return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)));
+        }
+        return response(req -> {
+            DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false);
+            try {
+                JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody);
+                datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason"));
+                String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null);
+                if (deaccessionForwardURL != null) {
+                    try {
+                        datasetVersion.setArchiveNote(deaccessionForwardURL);
+                    } catch (IllegalArgumentException iae) {
+                        return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage())));
+                    }
+                }
+                execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false));
+                return ok("Dataset " + datasetId + " deaccessioned for version " + versionId);
+            } catch (JsonParsingException jpe) {
+                return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
             }
-            
+        }, getRequestUser(crc));
+    }
+
+    @GET
+    @AuthRequired
+    @Path("{identifier}/guestbookEntryAtRequest")
+    public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
+                                            @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(dvIdtf);
+        } catch (WrappedResponse ex) {
+            return error(Response.Status.NOT_FOUND, "No such dataset");
+        }
+        String gbAtRequest = dataset.getGuestbookEntryAtRequest();
+        if(gbAtRequest == null || gbAtRequest.equals(DvObjectContainer.UNDEFINED_CODE)) {
+            return ok("Not set on dataset, using the default: " + dataset.getEffectiveGuestbookEntryAtRequest());
+        }
+        return ok(dataset.getEffectiveGuestbookEntryAtRequest());
+    }
+
+    @PUT
+    @AuthRequired
+    @Path("{identifier}/guestbookEntryAtRequest")
+    public Response setGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
+                                               boolean gbAtRequest,
+                                               @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+
+        // Superuser-only:
+        AuthenticatedUser user;
+        try {
+            user = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse ex) {
+            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
+        }
+        if (!user.isSuperuser()) {
+            return error(Response.Status.FORBIDDEN, "Superusers only.");
+        }
+
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(dvIdtf);
+        } catch (WrappedResponse ex) {
+            return error(Response.Status.NOT_FOUND, "No such dataset");
+        }
+        Optional<Boolean> gbAtRequestOpt = JvmSettings.GUESTBOOK_AT_REQUEST.lookupOptional(Boolean.class);
+        if (!gbAtRequestOpt.isPresent()) {
+            return error(Response.Status.FORBIDDEN, "Guestbook Entry At Request cannot be set. This server is not configured to allow it.");
+        }
+        String choice = Boolean.valueOf(gbAtRequest).toString();
+        dataset.setGuestbookEntryAtRequest(choice);
+        datasetService.merge(dataset);
+        return ok("Guestbook Entry At Request set to: " + choice);
+    }
 
-            ExternalToolHandler eth = new ExternalToolHandler(externalTool, target.getDataset(), apiToken, locale);
-            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters()))));
+    @DELETE
+    @AuthRequired
+    @Path("{identifier}/guestbookEntryAtRequest")
+    public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
+                                                 @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse {
+
+        // Superuser-only:
+        AuthenticatedUser user;
+        try {
+            user = getRequestAuthenticatedUserOrDie(crc);
+        } catch (WrappedResponse ex) {
+            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
+        }
+        if (!user.isSuperuser()) {
+            return error(Response.Status.FORBIDDEN, "Superusers only.");
+        }
+
+        Dataset dataset;
+
+        try {
+            dataset = findDatasetOrDie(dvIdtf);
+        } catch (WrappedResponse ex) {
+            return error(Response.Status.NOT_FOUND, "No such dataset");
+        }
+
+        dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE);
+        datasetService.merge(dataset);
+        return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest());
+    }
+
+    @GET
+    @AuthRequired
+    @Path("{id}/userPermissions")
+    public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) {
+        Dataset dataset;
+        try {
+            dataset = findDatasetOrDie(datasetId);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
+        User requestUser = getRequestUser(crc);
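+        // Report, as booleans, the key dataset permissions as evaluated for the requesting user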
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset));
+        jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset));
+        jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset));
+        jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions));
+        jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft));
+        return ok(jsonObjectBuilder);
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
index 90130cb3944..6c1bf42c02a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java
@@ -9,15 +9,18 @@
 import edu.harvard.iq.dataverse.DataverseContact;
 import edu.harvard.iq.dataverse.DataverseMetadataBlockFacet;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean;
 import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 import edu.harvard.iq.dataverse.GuestbookResponseServiceBean;
 import edu.harvard.iq.dataverse.GuestbookServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.RoleAssignment;
+
 import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO;
 import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO;
 import edu.harvard.iq.dataverse.api.dto.RoleDTO;
@@ -38,9 +41,13 @@
 import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.DeleteCollectionQuotaCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteDataverseLinkingDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteExplicitGroupCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetSchemaCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionQuotaCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.GetCollectionStorageUseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetRootCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.GetDataverseStorageSizeCommand;
@@ -60,11 +67,14 @@
 import edu.harvard.iq.dataverse.engine.command.impl.PublishDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RemoveRoleAssigneesFromExplicitGroupCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.SetCollectionQuotaCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseDefaultContributorRoleCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetsCommand;
+import edu.harvard.iq.dataverse.engine.command.impl.ValidateDatasetJsonCommand;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
@@ -73,40 +83,43 @@
 
 import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
+import edu.harvard.iq.dataverse.util.json.JsonPrinter;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief;
-import java.io.StringReader;
 import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.TreeSet;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
-import javax.json.stream.JsonParsingException;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.json.stream.JsonParsingException;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import java.io.IOException;
@@ -118,10 +131,10 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.stream.Collectors;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.StreamingOutput;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.StreamingOutput;
 import javax.xml.stream.XMLStreamException;
 
 /**
@@ -156,21 +169,23 @@ public class Dataverses extends AbstractApiBean {
 
     @EJB
     SwordServiceBean swordService;
-
+    
     @POST
-    public Response addRoot(String body) {
+    @AuthRequired
+    public Response addRoot(@Context ContainerRequestContext crc, String body) {
         logger.info("Creating root dataverse");
-        return addDataverse(body, "");
+        return addDataverse(crc, body, "");
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}")
-    public Response addDataverse(String body, @PathParam("identifier") String parentIdtf) {
+    public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) {
 
         Dataverse d;
         JsonObject dvJson;
-        try (StringReader rdr = new StringReader(body)) {
-            dvJson = Json.createReader(rdr).readObject();
+        try {
+            dvJson = JsonUtil.getJsonObject(body);
             d = jsonParser().parseDataverse(dvJson);
         } catch (JsonParsingException jpe) {
             logger.log(Level.SEVERE, "Json: {0}", body);
@@ -192,7 +207,7 @@ public Response addDataverse(String body, @PathParam("identifier") String parent
                 dc.setDataverse(d);
             }
 
-            AuthenticatedUser u = findAuthenticatedUserOrDie();
+            AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc);
             d = execCommand(new CreateDataverseCommand(d, createDataverseRequest(u), null, null));
             return created("/dataverses/" + d.getAlias(), json(d));
         } catch (WrappedResponse ww) {
@@ -222,17 +237,55 @@ public Response addDataverse(String body, @PathParam("identifier") String parent
 
         }
     }
+    
+    @POST
+    @AuthRequired
+    @Path("{identifier}/validateDatasetJson")
+    @Consumes("application/json")
+    public Response validateDatasetJson(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String idtf) {
+        User u = getRequestUser(crc);
+        try {
+            String validationMessage = execCommand(new ValidateDatasetJsonCommand(createDataverseRequest(u), findDataverseOrDie(idtf), body));
+            return ok(validationMessage);
+        } catch (WrappedResponse ex) {
+            Logger.getLogger(Dataverses.class.getName()).log(Level.SEVERE, null, ex);
+            return ex.getResponse();
+        }
+    }
+    
+    @GET
+    @AuthRequired
+    @Path("{identifier}/datasetSchema")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response getDatasetSchema(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf) {
+        User u = getRequestUser(crc);
+
+        try {
+            String datasetSchema = execCommand(new GetDatasetSchemaCommand(createDataverseRequest(u), findDataverseOrDie(idtf)));
+            JsonObject jsonObject = JsonUtil.getJsonObject(datasetSchema);
+            return Response.ok(jsonObject).build();
+        } catch (WrappedResponse ex) {
+            Logger.getLogger(Dataverses.class.getName()).log(Level.SEVERE, null, ex);
+            return ex.getResponse();
+        }
+    }
+
 
     @POST
+    @AuthRequired
     @Path("{identifier}/datasets")
     @Consumes("application/json")
-    public Response createDataset(String jsonBody, @PathParam("identifier") String parentIdtf) {
+    public Response createDataset(@Context ContainerRequestContext crc, String jsonBody, @PathParam("identifier") String parentIdtf, @QueryParam("doNotValidate") String doNotValidateParam) {
         try {
             logger.fine("Json is: " + jsonBody);
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataverse owner = findDataverseOrDie(parentIdtf);
             Dataset ds = parseDataset(jsonBody);
             ds.setOwner(owner);
+            // Validation always happens, except in the (rare) case where all three of the following conditions are true
+            boolean validate = ! ( u.isAuthenticated() && StringUtil.isTrue(doNotValidateParam) &&
+                JvmSettings.API_ALLOW_INCOMPLETE_METADATA.lookupOptional(Boolean.class).orElse(false) );
 
             if (ds.getVersions().isEmpty()) {
                 return badRequest(BundleUtil.getStringFromBundle("dataverses.api.create.dataset.error.mustIncludeVersion"));
@@ -247,6 +300,11 @@ public Response createDataset(String jsonBody, @PathParam("identifier") String p
 
             // clean possible version metadata
             DatasetVersion version = ds.getVersions().get(0);
+
+            if (!validate && (version.getDatasetAuthors().isEmpty() || version.getDatasetAuthors().stream().anyMatch(a -> a.getName() == null || a.getName().isEmpty()))) {
+                return badRequest(BundleUtil.getStringFromBundle("dataverses.api.create.dataset.error.mustIncludeAuthorName"));
+            }
+
             version.setMinorVersionNumber(null);
             version.setVersionNumber(null);
             version.setVersionState(DatasetVersion.VersionState.DRAFT);
@@ -259,7 +317,7 @@ public Response createDataset(String jsonBody, @PathParam("identifier") String p
             ds.setGlobalIdCreateTime(null);
             Dataset managedDs = null;
             try {
-                managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u)));
+                managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u), null, validate));
             } catch (WrappedResponse ww) {
                 Throwable cause = ww.getCause();
                 StringBuilder sb = new StringBuilder();
@@ -283,7 +341,7 @@ public Response createDataset(String jsonBody, @PathParam("identifier") String p
             return created("/datasets/" + managedDs.getId(),
                     Json.createObjectBuilder()
                             .add("id", managedDs.getId())
-                            .add("persistentId", managedDs.getGlobalIdString())
+                            .add("persistentId", managedDs.getGlobalId().asString())
             );
 
         } catch (WrappedResponse ex) {
@@ -292,11 +350,12 @@ public Response createDataset(String jsonBody, @PathParam("identifier") String p
     }
     
     @POST
+    @AuthRequired
     @Path("{identifier}/datasets")
     @Consumes("application/ld+json, application/json-ld")
-    public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifier") String parentIdtf) {
+    public Response createDatasetFromJsonLd(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("identifier") String parentIdtf) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataverse owner = findDataverseOrDie(parentIdtf);
             Dataset ds = new Dataset();
 
@@ -325,7 +384,7 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie
             return created("/datasets/" + managedDs.getId(),
                     Json.createObjectBuilder()
                             .add("id", managedDs.getId())
-                            .add("persistentId", managedDs.getGlobalIdString())
+                            .add("persistentId", managedDs.getGlobalId().asString())
             );
 
         } catch (WrappedResponse ex) {
@@ -334,10 +393,11 @@ public Response createDatasetFromJsonLd(String jsonLDBody, @PathParam("identifie
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/datasets/:import")
-    public Response importDataset(String jsonBody, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam) {
+    public Response importDataset(@Context ContainerRequestContext crc, String jsonBody, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if (!u.isSuperuser()) {
                 return error(Status.FORBIDDEN, "Not a superuser");
             }
@@ -361,7 +421,7 @@ public Response importDataset(String jsonBody, @PathParam("identifier") String p
                 if (!GlobalId.verifyImportCharacters(pidParam)) {
                     return badRequest("PID parameter contains characters that are not allowed by the Dataverse application. On import, the PID must only contain characters specified in this regex: " + BundleUtil.getStringFromBundle("pid.allowedCharacters"));
                 }
-                Optional<GlobalId> maybePid = GlobalId.parse(pidParam);
+                Optional<GlobalId> maybePid = GlobalIdServiceBean.parse(pidParam);
                 if (maybePid.isPresent()) {
                     ds.setGlobalId(maybePid.get());
                 } else {
@@ -392,7 +452,7 @@ public Response importDataset(String jsonBody, @PathParam("identifier") String p
             Dataset managedDs = execCommand(new ImportDatasetCommand(ds, request));
             JsonObjectBuilder responseBld = Json.createObjectBuilder()
                     .add("id", managedDs.getId())
-                    .add("persistentId", managedDs.getGlobalIdString());
+                    .add("persistentId", managedDs.getGlobalId().asString());
 
             if (shouldRelease) {
                 PublishDatasetResult res = execCommand(new PublishDatasetCommand(managedDs, request, false, shouldRelease));
@@ -408,10 +468,11 @@ public Response importDataset(String jsonBody, @PathParam("identifier") String p
 
     // TODO decide if I merge importddi with import just below (xml and json on same api, instead of 2 api)
     @POST
+    @AuthRequired
     @Path("{identifier}/datasets/:importddi")
-    public Response importDatasetDdi(String xml, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam) {
+    public Response importDatasetDdi(@Context ContainerRequestContext crc, String xml, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if (!u.isSuperuser()) {
                 return error(Status.FORBIDDEN, "Not a superuser");
             }
@@ -435,7 +496,7 @@ public Response importDatasetDdi(String xml, @PathParam("identifier") String par
                 if (!GlobalId.verifyImportCharacters(pidParam)) {
                     return badRequest("PID parameter contains characters that are not allowed by the Dataverse application. On import, the PID must only contain characters specified in this regex: " + BundleUtil.getStringFromBundle("pid.allowedCharacters"));
                 }
-                Optional<GlobalId> maybePid = GlobalId.parse(pidParam);
+                Optional<GlobalId> maybePid = GlobalIdServiceBean.parse(pidParam);
                 if (maybePid.isPresent()) {
                     ds.setGlobalId(maybePid.get());
                 } else {
@@ -457,7 +518,7 @@ public Response importDatasetDdi(String xml, @PathParam("identifier") String par
 
             JsonObjectBuilder responseBld = Json.createObjectBuilder()
                     .add("id", managedDs.getId())
-                    .add("persistentId", managedDs.getGlobalIdString());
+                    .add("persistentId", managedDs.getGlobalId().toString());
 
             if (shouldRelease) {
                 DatasetVersion latestVersion = ds.getLatestVersion();
@@ -482,11 +543,12 @@ public Response importDatasetDdi(String xml, @PathParam("identifier") String par
     }
     
     @POST
+    @AuthRequired
     @Path("{identifier}/datasets/:startmigration")
     @Consumes("application/ld+json, application/json-ld")
-    public Response recreateDataset(String jsonLDBody, @PathParam("identifier") String parentIdtf) {
+    public Response recreateDataset(@Context ContainerRequestContext crc, String jsonLDBody, @PathParam("identifier") String parentIdtf) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if (!u.isSuperuser()) {
                 return error(Status.FORBIDDEN, "Not a superuser");
             }
@@ -503,7 +565,7 @@ public Response recreateDataset(String jsonLDBody, @PathParam("identifier") Stri
             ds.getIdentifier().startsWith(settingsService.getValueForKey(SettingsServiceBean.Key.Shoulder)))) {
                 throw new BadRequestException("Cannot recreate a dataset that has a PID that doesn't match the server's settings");
             }
-            if(!datasetSvc.isIdentifierLocallyUnique(ds)) {
+            if(!dvObjectSvc.isGlobalIdLocallyUnique(ds.getGlobalId())) {
                 throw new BadRequestException("Cannot recreate a dataset whose PID is already in use");
             }
             
@@ -537,8 +599,8 @@ public Response recreateDataset(String jsonLDBody, @PathParam("identifier") Stri
     }
     
     private Dataset parseDataset(String datasetJson) throws WrappedResponse {
-        try (StringReader rdr = new StringReader(datasetJson)) {
-            return jsonParser().parseDataset(Json.createReader(rdr).readObject());
+        try {
+            return jsonParser().parseDataset(JsonUtil.getJsonObject(datasetJson));
         } catch (JsonParsingException | JsonParseException jpe) {
             logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", datasetJson);
             throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()));
@@ -546,39 +608,112 @@ private Dataset parseDataset(String datasetJson) throws WrappedResponse {
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}")
-    public Response viewDataverse(@PathParam("identifier") String idtf) {
+    public Response viewDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf) {
         return response(req -> ok(
             json(execCommand(new GetDataverseCommand(req, findDataverseOrDie(idtf))),
                 settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false)
-            )));
+            )), getRequestUser(crc));
     }
 
     @DELETE
+    @AuthRequired
     @Path("{identifier}")
-    public Response deleteDataverse(@PathParam("identifier") String idtf) {
+    public Response deleteDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String idtf) {
         return response(req -> {
             execCommand(new DeleteDataverseCommand(req, findDataverseOrDie(idtf)));
             return ok("Dataverse " + idtf + " deleted");
-        });
+        }, getRequestUser(crc));
+    }
+
+    /**
+     * Endpoint to change attributes of a Dataverse collection.
+     *
+     * @apiNote Example curl command:
+     *          <code>curl -X PUT "http://localhost:8080/api/dataverses/$ALIAS/attribute/alias?value=test"</code>
+     *          to change the alias of the collection named $ALIAS to "test" (the new value is passed in the "value" query parameter).
+     */
+    @PUT
+    @AuthRequired
+    @Path("{identifier}/attribute/{attribute}")
+    public Response updateAttribute(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier,
+                                    @PathParam("attribute") String attribute, @QueryParam("value") String value) {
+        try {
+            Dataverse collection = findDataverseOrDie(identifier);
+            User user = getRequestUser(crc);
+            DataverseRequest dvRequest = createDataverseRequest(user);
+    
+            // TODO: The cases below use hard coded strings, because we have no place for definitions of those!
+            //       They are taken from util.json.JsonParser / util.json.JsonPrinter. This shall be changed.
+            //       This also should be extended to more attributes, like the type, theme, contacts, some booleans, etc.
+            switch (attribute) {
+                case "alias":
+                    collection.setAlias(value);
+                    break;
+                case "name":
+                    collection.setName(value);
+                    break;
+                case "description":
+                    collection.setDescription(value);
+                    break;
+                case "affiliation":
+                    collection.setAffiliation(value);
+                    break;
+                /* commenting out the code from the draft pr #9462:
+                case "versionPidsConduct":
+                    CollectionConduct conduct = CollectionConduct.findBy(value);
+                    if (conduct == null) {
+                        return badRequest("'" + value + "' is not one of [" +
+                            String.join(",", CollectionConduct.asList()) + "]");
+                    }
+                    collection.setDatasetVersionPidConduct(conduct);
+                    break;
+                 */
+                case "filePIDsEnabled":
+                    if(!user.isSuperuser()) {
+                        return forbidden("You must be a superuser to change this setting");
+                    }
+                    if(!settingsService.isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) {
+                        return forbidden("Changing File PID policy per collection is not enabled on this server");
+                    }
+                    collection.setFilePIDsEnabled(parseBooleanOrDie(value));
+                    break;
+                default:
+                    return badRequest("'" + attribute + "' is not a supported attribute");
+            }
+            
+            // Off to persistence layer
+            execCommand(new UpdateDataverseCommand(collection, null, null, dvRequest, null));
+    
+            // Also return modified collection to user
+            return ok("Update successful", JsonPrinter.json(collection));
+        
+        // TODO: This is an anti-pattern, but necessary because this bean is an EJB: letting the exception bubble up
+        //       would cause very noisy and unnecessary logging by the EJB container. (Otherwise it would be handled by the error handlers.)
+        } catch (WrappedResponse e) {
+            return e.getResponse();
+        }
     }
 
     @DELETE
+    @AuthRequired
     @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}")
-    public Response deleteDataverseLinkingDataverse(@PathParam("linkingDataverseId") String linkingDataverseId, @PathParam("linkedDataverseId") String linkedDataverseId) {
+    public Response deleteDataverseLinkingDataverse(@Context ContainerRequestContext crc, @PathParam("linkingDataverseId") String linkingDataverseId, @PathParam("linkedDataverseId") String linkedDataverseId) {
         boolean index = true;
         return response(req -> {
             execCommand(new DeleteDataverseLinkingDataverseCommand(req, findDataverseOrDie(linkingDataverseId), findDataverseLinkingDataverseOrDie(linkingDataverseId, linkedDataverseId), index));
             return ok("Link from Dataverse " + linkingDataverseId + " to linked Dataverse " + linkedDataverseId + " deleted");
-        });
+        }, getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/metadatablocks")
-    public Response listMetadataBlocks(@PathParam("identifier") String dvIdtf) {
+    public Response listMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         try {
             JsonArrayBuilder arr = Json.createArrayBuilder();
-            final List<MetadataBlock> blocks = execCommand(new ListMetadataBlocksCommand(createDataverseRequest(findUserOrDie()), findDataverseOrDie(dvIdtf)));
+            final List<MetadataBlock> blocks = execCommand(new ListMetadataBlocksCommand(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf)));
             for (MetadataBlock mdb : blocks) {
                 arr.add(brief.json(mdb));
             }
@@ -589,9 +724,10 @@ public Response listMetadataBlocks(@PathParam("identifier") String dvIdtf) {
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/metadatablocks")
     @Produces(MediaType.APPLICATION_JSON)
-    public Response setMetadataBlocks(@PathParam("identifier") String dvIdtf, String blockIds) {
+    public Response setMetadataBlocks(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String blockIds) {
 
         List<MetadataBlock> blocks = new LinkedList<>();
         try {
@@ -609,7 +745,7 @@ public Response setMetadataBlocks(@PathParam("identifier") String dvIdtf, String
         }
 
         try {
-            execCommand(new UpdateDataverseMetadataBlocksCommand.SetBlocks(createDataverseRequest(findUserOrDie()), findDataverseOrDie(dvIdtf), blocks));
+            execCommand(new UpdateDataverseMetadataBlocksCommand.SetBlocks(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf), blocks));
             return ok("Metadata blocks of dataverse " + dvIdtf + " updated.");
 
         } catch (WrappedResponse ex) {
@@ -618,15 +754,17 @@ public Response setMetadataBlocks(@PathParam("identifier") String dvIdtf, String
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/metadatablocks/:isRoot")
-    public Response getMetadataRoot_legacy(@PathParam("identifier") String dvIdtf) {
-        return getMetadataRoot(dvIdtf);
+    public Response getMetadataRoot_legacy(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
+        return getMetadataRoot(crc, dvIdtf);
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/metadatablocks/isRoot")
     @Produces(MediaType.APPLICATION_JSON)
-    public Response getMetadataRoot(@PathParam("identifier") String dvIdtf) {
+    public Response getMetadataRoot(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         return response(req -> {
             final Dataverse dataverse = findDataverseOrDie(dvIdtf);
             if (permissionSvc.request(req)
@@ -636,38 +774,41 @@ public Response getMetadataRoot(@PathParam("identifier") String dvIdtf) {
             } else {
                 return error(Status.FORBIDDEN, "Not authorized");
             }
-        });
+        }, getRequestUser(crc));
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/metadatablocks/:isRoot")
     @Produces(MediaType.APPLICATION_JSON)
     @Consumes(MediaType.WILDCARD)
-    public Response setMetadataRoot_legacy(@PathParam("identifier") String dvIdtf, String body) {
-        return setMetadataRoot(dvIdtf, body);
+    public Response setMetadataRoot_legacy(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String body) {
+        return setMetadataRoot(crc, dvIdtf, body);
     }
 
     @PUT
+    @AuthRequired
     @Path("{identifier}/metadatablocks/isRoot")
     @Produces(MediaType.APPLICATION_JSON)
     @Consumes(MediaType.WILDCARD)
-    public Response setMetadataRoot(@PathParam("identifier") String dvIdtf, String body) {
+    public Response setMetadataRoot(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String body) {
         return response(req -> {
             final boolean root = parseBooleanOrDie(body);
             final Dataverse dataverse = findDataverseOrDie(dvIdtf);
             execCommand(new UpdateDataverseMetadataBlocksCommand.SetRoot(req, dataverse, root));
             return ok("Dataverse " + dataverse.getName() + " is now a metadata  " + (root ? "" : "non-") + "root");
-        });
+        }, getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/facets/")
     /**
      * return list of facets for the dataverse with alias `dvIdtf`
      */
-    public Response listFacets(@PathParam("identifier") String dvIdtf) {
+    public Response listFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             DataverseRequest r = createDataverseRequest(u);
             Dataverse dataverse = findDataverseOrDie(dvIdtf);
             JsonArrayBuilder fs = Json.createArrayBuilder();
@@ -681,6 +822,7 @@ public Response listFacets(@PathParam("identifier") String dvIdtf) {
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/facets")
     @Produces(MediaType.APPLICATION_JSON)
     /**
@@ -690,7 +832,7 @@ public Response listFacets(@PathParam("identifier") String dvIdtf) {
      * where foo.json contains a list of datasetField names, works as expected
      * (judging by the UI). This triggers a 500 when '-d @foo.json' is used.
      */
-    public Response setFacets(@PathParam("identifier") String dvIdtf, String facetIds) {
+    public Response setFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String facetIds) {
 
         List<DatasetFieldType> facets = new LinkedList<>();
         for (JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class)) {
@@ -706,7 +848,7 @@ public Response setFacets(@PathParam("identifier") String dvIdtf, String facetId
         try {
             Dataverse dataverse = findDataverseOrDie(dvIdtf);
             // by passing null for Featured Dataverses and DataverseFieldTypeInputLevel, those are not changed
-            execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(findUserOrDie()), null));
+            execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(getRequestUser(crc)), null));
             return ok("Facets of dataverse " + dvIdtf + " updated.");
 
         } catch (WrappedResponse ex) {
@@ -715,11 +857,12 @@ public Response setFacets(@PathParam("identifier") String dvIdtf, String facetId
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/metadatablockfacets")
     @Produces(MediaType.APPLICATION_JSON)
-    public Response listMetadataBlockFacets(@PathParam("identifier") String dvIdtf) {
+    public Response listMetadataBlockFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             DataverseRequest request = createDataverseRequest(u);
             Dataverse dataverse = findDataverseOrDie(dvIdtf);
             List<DataverseMetadataBlockFacet> metadataBlockFacets = Optional.ofNullable(execCommand(new ListMetadataBlockFacetsCommand(request, dataverse))).orElse(Collections.emptyList());
@@ -734,10 +877,11 @@ public Response listMetadataBlockFacets(@PathParam("identifier") String dvIdtf)
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/metadatablockfacets")
     @Consumes(MediaType.APPLICATION_JSON)
     @Produces(MediaType.APPLICATION_JSON)
-    public Response setMetadataBlockFacets(@PathParam("identifier") String dvIdtf, List<String> metadataBlockNames) {
+    public Response setMetadataBlockFacets(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, List<String> metadataBlockNames) {
         try {
             Dataverse dataverse = findDataverseOrDie(dvIdtf);
 
@@ -758,7 +902,7 @@ public Response setMetadataBlockFacets(@PathParam("identifier") String dvIdtf, L
                 metadataBlockFacets.add(metadataBlockFacet);
             }
 
-            execCommand(new UpdateMetadataBlockFacetsCommand(createDataverseRequest(findUserOrDie()), dataverse, metadataBlockFacets));
+            execCommand(new UpdateMetadataBlockFacetsCommand(createDataverseRequest(getRequestUser(crc)), dataverse, metadataBlockFacets));
             return ok(String.format("Metadata block facets updated. DataverseId: %s blocks: %s", dvIdtf, metadataBlockNames));
 
         } catch (WrappedResponse ex) {
@@ -767,10 +911,11 @@ public Response setMetadataBlockFacets(@PathParam("identifier") String dvIdtf, L
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/metadatablockfacets/isRoot")
     @Consumes(MediaType.APPLICATION_JSON)
     @Produces(MediaType.APPLICATION_JSON)
-    public Response updateMetadataBlockFacetsRoot(@PathParam("identifier") String dvIdtf, String body) {
+    public Response updateMetadataBlockFacetsRoot(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, String body) {
         try {
             final boolean blockFacetsRoot = parseBooleanOrDie(body);
             Dataverse dataverse = findDataverseOrDie(dvIdtf);
@@ -778,7 +923,7 @@ public Response updateMetadataBlockFacetsRoot(@PathParam("identifier") String dv
                 return ok(String.format("No update needed, dataverse already consistent with new value. DataverseId: %s blockFacetsRoot: %s", dvIdtf, blockFacetsRoot));
             }
 
-            execCommand(new UpdateMetadataBlockFacetRootCommand(createDataverseRequest(findUserOrDie()), dataverse, blockFacetsRoot));
+            execCommand(new UpdateMetadataBlockFacetRootCommand(createDataverseRequest(getRequestUser(crc)), dataverse, blockFacetsRoot));
             return ok(String.format("Metadata block facets root updated. DataverseId: %s blockFacetsRoot: %s", dvIdtf, blockFacetsRoot));
 
         } catch (WrappedResponse ex) {
@@ -791,8 +936,9 @@ public Response updateMetadataBlockFacetsRoot(@PathParam("identifier") String dv
     // (2438-4295-dois-for-files branch) such that a contributor API token no longer allows this method
     // to be called without a PermissionException being thrown.
     @GET
+    @AuthRequired
     @Path("{identifier}/contents")
-    public Response listContent(@PathParam("identifier") String dvIdtf) throws WrappedResponse {
+    public Response listContent(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) throws WrappedResponse {
 
         DvObject.Visitor<JsonObjectBuilder> ser = new DvObject.Visitor<JsonObjectBuilder>() {
             @Override
@@ -818,43 +964,102 @@ public JsonObjectBuilder visit(DataFile df) {
                         .stream()
                         .map(dvo -> (JsonObjectBuilder) dvo.accept(ser))
                         .collect(toJsonArray())
-        ));
+        ), getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/storagesize")
-    public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) throws WrappedResponse {
+    public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) throws WrappedResponse {
                 
         return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.datasize"),
-                execCommand(new GetDataverseStorageSizeCommand(req, findDataverseOrDie(dvIdtf), includeCached)))));
+                execCommand(new GetDataverseStorageSizeCommand(req, findDataverseOrDie(dvIdtf), includeCached)))), getRequestUser(crc));
+    }
+    
+    @GET
+    @AuthRequired
+    @Path("{identifier}/storage/quota")
+    public Response getCollectionQuota(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) throws WrappedResponse {
+        try {
+            Long bytesAllocated = execCommand(new GetCollectionQuotaCommand(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf)));
+            if (bytesAllocated != null) {
+                return ok(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.storage.quota.allocation"),bytesAllocated));
+            }
+            return ok(BundleUtil.getStringFromBundle("dataverse.storage.quota.notdefined"));
+        } catch (WrappedResponse ex) {
+            return ex.getResponse();
+        }
+    }
+    
+    @POST
+    @AuthRequired
+    @Path("{identifier}/storage/quota/{bytesAllocated}")
+    public Response setCollectionQuota(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("bytesAllocated") Long bytesAllocated) throws WrappedResponse {
+        try {
+            execCommand(new SetCollectionQuotaCommand(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf), bytesAllocated));
+            return ok(BundleUtil.getStringFromBundle("dataverse.storage.quota.updated"));
+        } catch (WrappedResponse ex) {
+            return ex.getResponse();
+        }
     }
     
+    @DELETE
+    @AuthRequired
+    @Path("{identifier}/storage/quota")
+    public Response deleteCollectionQuota(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) throws WrappedResponse {
+        try {
+            execCommand(new DeleteCollectionQuotaCommand(createDataverseRequest(getRequestUser(crc)), findDataverseOrDie(dvIdtf)));
+            return ok(BundleUtil.getStringFromBundle("dataverse.storage.quota.deleted"));
+        } catch (WrappedResponse ex) {
+            return ex.getResponse();
+        }
+    }
     
+    /**
+     *
+     * @param crc
+     * @param identifier
+     * @return
+     * @throws edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse 
+     * @todo: add an optional parameter that would force the recorded storage use
+     * to be recalculated (or should that be a POST version of this API?)
+     */
+    @GET
+    @AuthRequired
+    @Path("{identifier}/storage/use")
+    public Response getCollectionStorageUse(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) throws WrappedResponse {
+        return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.storage.use"),
+                execCommand(new GetCollectionStorageUseCommand(req, findDataverseOrDie(identifier))))), getRequestUser(crc));
+    }
+
     @GET
+    @AuthRequired
     @Path("{identifier}/roles")
-    public Response listRoles(@PathParam("identifier") String dvIdtf) {
+    public Response listRoles(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         return response(req -> ok(
                 execCommand(new ListRolesCommand(req, findDataverseOrDie(dvIdtf)))
                         .stream().map(r -> json(r))
                         .collect(toJsonArray())
-        ));
+        ), getRequestUser(crc));
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/roles")
-    public Response createRole(RoleDTO roleDto, @PathParam("identifier") String dvIdtf) {
-        return response(req -> ok(json(execCommand(new CreateRoleCommand(roleDto.asRole(), req, findDataverseOrDie(dvIdtf))))));
+    public Response createRole(@Context ContainerRequestContext crc, RoleDTO roleDto, @PathParam("identifier") String dvIdtf) {
+        return response(req -> ok(json(execCommand(new CreateRoleCommand(roleDto.asRole(), req, findDataverseOrDie(dvIdtf))))), getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/assignments")
-    public Response listAssignments(@PathParam("identifier") String dvIdtf) {
+    public Response listAssignments(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         return response(req -> ok(
                 execCommand(new ListRoleAssignments(req, findDataverseOrDie(dvIdtf)))
                         .stream()
                         .map(a -> json(a))
                         .collect(toJsonArray())
-        ));
+        ), getRequestUser(crc));
     }
 
     /**
@@ -866,6 +1071,8 @@ public Response listAssignments(@PathParam("identifier") String dvIdtf) {
      */
 //    File tempDir;
 //
+//    TODO: Code is duplicated in ThemeWidgetFragment. Maybe extract it, make it static, and put it somewhere else?
+//          Important: at least use JvmSettings.DOCROOT_DIRECTORY and not the hardcoded location!
 //    private void createTempDir(Dataverse editDv) {
 //        try {
 //            File tempRoot = java.nio.file.Files.createDirectories(Paths.get("../docroot/logos/temp")).toFile();
@@ -947,11 +1154,12 @@ public Response listAssignments(@PathParam("identifier") String dvIdtf) {
 //        }
 //    }
     @POST
+    @AuthRequired
     @Path("{identifier}/assignments")
-    public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey) {
+    public Response createAssignment(@Context ContainerRequestContext crc, RoleAssignmentDTO ra, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey) {
 
         try {
-            final DataverseRequest req = createDataverseRequest(findUserOrDie());
+            final DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             final Dataverse dataverse = findDataverseOrDie(dvIdtf);
 
             RoleAssignee assignee = findAssignee(ra.getAssignee());
@@ -985,13 +1193,14 @@ public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier")
     }
 
     @DELETE
+    @AuthRequired
     @Path("{identifier}/assignments/{id}")
-    public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam("identifier") String dvIdtf) {
+    public Response deleteAssignment(@Context ContainerRequestContext crc, @PathParam("id") long assignmentId, @PathParam("identifier") String dvIdtf) {
         RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
         if (ra != null) {
             try {
                 findDataverseOrDie(dvIdtf);
-                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(findUserOrDie())));
+                execCommand(new RevokeRoleCommand(ra, createDataverseRequest(getRequestUser(crc))));
                 return ok("Role " + ra.getRole().getName()
                         + " revoked for assignee " + ra.getAssigneeIdentifier()
                         + " in " + ra.getDefinitionPoint().accept(DvObject.NamePrinter));
@@ -1004,11 +1213,12 @@ public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam(
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/actions/:publish")
-    public Response publishDataverse(@PathParam("identifier") String dvIdtf) {
+    public Response publishDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         try {
             Dataverse dv = findDataverseOrDie(dvIdtf);
-            return ok(json(execCommand(new PublishDataverseCommand(createDataverseRequest(findAuthenticatedUserOrDie()), dv))));
+            return ok(json(execCommand(new PublishDataverseCommand(createDataverseRequest(getRequestAuthenticatedUserOrDie(crc)), dv))));
 
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -1016,8 +1226,9 @@ public Response publishDataverse(@PathParam("identifier") String dvIdtf) {
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/groups/")
-    public Response createExplicitGroup(ExplicitGroupDTO dto, @PathParam("identifier") String dvIdtf) {
+    public Response createExplicitGroup(@Context ContainerRequestContext crc, ExplicitGroupDTO dto, @PathParam("identifier") String dvIdtf) {
         return response(req -> {
             ExplicitGroupProvider prv = explicitGroupSvc.getProvider();
             ExplicitGroup newGroup = dto.apply(prv.makeGroup());
@@ -1026,36 +1237,41 @@ public Response createExplicitGroup(ExplicitGroupDTO dto, @PathParam("identifier
 
             String groupUri = String.format("%s/groups/%s", dvIdtf, newGroup.getGroupAliasInOwner());
             return created(groupUri, json(newGroup));
-        });
+        }, getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/groups/")
-    public Response listGroups(@PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey) {
+    public Response listGroups(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey) {
         return response(req -> ok(
                 execCommand(new ListExplicitGroupsCommand(req, findDataverseOrDie(dvIdtf)))
                         .stream().map(eg -> json(eg))
                         .collect(toJsonArray())
-        ));
+        ), getRequestUser(crc));
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/groups/{aliasInOwner}")
-    public Response getGroupByOwnerAndAliasInOwner(@PathParam("identifier") String dvIdtf,
-            @PathParam("aliasInOwner") String grpAliasInOwner) {
+    public Response getGroupByOwnerAndAliasInOwner(@Context ContainerRequestContext crc,
+                                                   @PathParam("identifier") String dvIdtf,
+                                                   @PathParam("aliasInOwner") String grpAliasInOwner) {
         return response(req -> ok(json(findExplicitGroupOrDie(findDataverseOrDie(dvIdtf),
                 req,
-                grpAliasInOwner))));
+                grpAliasInOwner))), getRequestUser(crc));
     }
     
     @GET
+    @AuthRequired
     @Path("{identifier}/guestbookResponses/")
-    public Response getGuestbookResponsesByDataverse(@PathParam("identifier") String dvIdtf,
+    public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf,
             @QueryParam("guestbookId") Long gbId, @Context HttpServletResponse response) {
 
+        Dataverse dv;
         try {
-            Dataverse dv = findDataverseOrDie(dvIdtf);
-            User u = findUserOrDie();
+            dv = findDataverseOrDie(dvIdtf);
+            User u = getRequestUser(crc);
             DataverseRequest req = createDataverseRequest(u);
             if (permissionSvc.request(req)
                     .on(dv)
@@ -1074,16 +1290,14 @@ public Response getGuestbookResponsesByDataverse(@PathParam("identifier") String
             public void write(OutputStream os) throws IOException,
                     WebApplicationException {
 
-                Dataverse dv = dataverseService.findByAlias(dvIdtf);
                 Map<Integer, Object> customQandAs = guestbookResponseService.mapCustomQuestionAnswersAsStrings(dv.getId(), gbId);
                 Map<Integer, String> datasetTitles = guestbookResponseService.mapDatasetTitles(dv.getId());
-                
+
                 List<Object[]> guestbookResults = guestbookResponseService.getGuestbookResults(dv.getId(), gbId);
                 os.write("Guestbook, Dataset, Dataset PID, Date, Type, File Name, File Id, File PID, User Name, Email, Institution, Position, Custom Questions\n".getBytes());
                 for (Object[] result : guestbookResults) {
                     StringBuilder sb = guestbookResponseService.convertGuestbookResponsesToCSV(customQandAs, datasetTitles, result);
                     os.write(sb.toString().getBytes());
-
                 }
             }
         };
@@ -1091,18 +1305,21 @@ public void write(OutputStream os) throws IOException,
     }
     
     @PUT
+    @AuthRequired
     @Path("{identifier}/groups/{aliasInOwner}")
-    public Response updateGroup(ExplicitGroupDTO groupDto,
+    public Response updateGroup(@Context ContainerRequestContext crc, ExplicitGroupDTO groupDto,
             @PathParam("identifier") String dvIdtf,
             @PathParam("aliasInOwner") String grpAliasInOwner) {
         return response(req -> ok(json(execCommand(
                 new UpdateExplicitGroupCommand(req,
-                        groupDto.apply(findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner)))))));
+                        groupDto.apply(findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner)))))), getRequestUser(crc));
     }
     
     @PUT
+    @AuthRequired
     @Path("{identifier}/defaultContributorRole/{roleAlias}")
     public Response updateDefaultContributorRole(
+            @Context ContainerRequestContext crc,
             @PathParam("identifier") String dvIdtf,
             @PathParam("roleAlias") String roleAlias) {
 
@@ -1138,7 +1355,7 @@ public Response updateDefaultContributorRole(
                 List<String> args = Arrays.asList(dv.getDisplayName(), defaultRoleName);
                 String retString = BundleUtil.getStringFromBundle("dataverses.api.update.default.contributor.role.success", args);
                 return ok(retString);
-            });
+            }, getRequestUser(crc));
 
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -1147,47 +1364,55 @@ public Response updateDefaultContributorRole(
     }
 
     @DELETE
+    @AuthRequired
     @Path("{identifier}/groups/{aliasInOwner}")
-    public Response deleteGroup(@PathParam("identifier") String dvIdtf,
-            @PathParam("aliasInOwner") String grpAliasInOwner) {
+    public Response deleteGroup(@Context ContainerRequestContext crc,
+                                @PathParam("identifier") String dvIdtf,
+                                @PathParam("aliasInOwner") String grpAliasInOwner) {
         return response(req -> {
             execCommand(new DeleteExplicitGroupCommand(req,
                     findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner)));
             return ok("Group " + dvIdtf + "/" + grpAliasInOwner + " deleted");
-        });
+        }, getRequestUser(crc));
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/groups/{aliasInOwner}/roleAssignees")
     @Consumes("application/json")
-    public Response addRoleAssingees(List<String> roleAssingeeIdentifiers,
-            @PathParam("identifier") String dvIdtf,
-            @PathParam("aliasInOwner") String grpAliasInOwner) {
+    public Response addRoleAssingees(@Context ContainerRequestContext crc,
+                                     List<String> roleAssingeeIdentifiers,
+                                     @PathParam("identifier") String dvIdtf,
+                                     @PathParam("aliasInOwner") String grpAliasInOwner) {
         return response(req -> ok(
                 json(
                     execCommand(
                                 new AddRoleAssigneesToExplicitGroupCommand(req,
                                         findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner),
-                                        new TreeSet<>(roleAssingeeIdentifiers))))));
+                                        new TreeSet<>(roleAssingeeIdentifiers))))), getRequestUser(crc));
     }
 
     @PUT
+    @AuthRequired
     @Path("{identifier}/groups/{aliasInOwner}/roleAssignees/{roleAssigneeIdentifier: .*}")
-    public Response addRoleAssingee(@PathParam("identifier") String dvIdtf,
-            @PathParam("aliasInOwner") String grpAliasInOwner,
-            @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) {
-        return addRoleAssingees(Collections.singletonList(roleAssigneeIdentifier), dvIdtf, grpAliasInOwner);
+    public Response addRoleAssingee(@Context ContainerRequestContext crc,
+                                    @PathParam("identifier") String dvIdtf,
+                                    @PathParam("aliasInOwner") String grpAliasInOwner,
+                                    @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) {
+        return addRoleAssingees(crc, Collections.singletonList(roleAssigneeIdentifier), dvIdtf, grpAliasInOwner);
     }
 
     @DELETE
+    @AuthRequired
     @Path("{identifier}/groups/{aliasInOwner}/roleAssignees/{roleAssigneeIdentifier: .*}")
-    public Response deleteRoleAssingee(@PathParam("identifier") String dvIdtf,
-            @PathParam("aliasInOwner") String grpAliasInOwner,
-            @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) {
+    public Response deleteRoleAssingee(@Context ContainerRequestContext crc,
+                                       @PathParam("identifier") String dvIdtf,
+                                       @PathParam("aliasInOwner") String grpAliasInOwner,
+                                       @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) {
         return response(req -> ok(json(execCommand(
                 new RemoveRoleAssigneesFromExplicitGroupCommand(req,
                         findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner),
-                        Collections.singleton(roleAssigneeIdentifier))))));
+                        Collections.singleton(roleAssigneeIdentifier))))), getRequestUser(crc));
     }
 
     private ExplicitGroup findExplicitGroupOrDie(DvObject dv, DataverseRequest req, String groupIdtf) throws WrappedResponse {
@@ -1199,10 +1424,11 @@ private ExplicitGroup findExplicitGroupOrDie(DvObject dv, DataverseRequest req,
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/links")
-    public Response listLinks(@PathParam("identifier") String dvIdtf) {
+    public Response listLinks(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataverse dv = findDataverseOrDie(dvIdtf);
             if (!u.isSuperuser()) {
                 return error(Status.FORBIDDEN, "Not a superuser");
@@ -1238,10 +1464,11 @@ public Response listLinks(@PathParam("identifier") String dvIdtf) {
     }
 
     @POST
+    @AuthRequired
     @Path("{id}/move/{targetDataverseAlias}")
-    public Response moveDataverse(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
+    public Response moveDataverse(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataverse dv = findDataverseOrDie(id);
             Dataverse target = findDataverseOrDie(targetDataverseAlias);
             if (target == null) {
@@ -1257,10 +1484,11 @@ public Response moveDataverse(@PathParam("id") String id, @PathParam("targetData
     }
 
     @PUT
+    @AuthRequired
     @Path("{linkedDataverseAlias}/link/{linkingDataverseAlias}")
-    public Response linkDataverse(@PathParam("linkedDataverseAlias") String linkedDataverseAlias, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
+    public Response linkDataverse(@Context ContainerRequestContext crc, @PathParam("linkedDataverseAlias") String linkedDataverseAlias, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             Dataverse linked = findDataverseOrDie(linkedDataverseAlias);
             Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
             if (linked == null) {
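The Dataverses API changes above add, among other things, a collection attribute endpoint (the new value is passed as a query parameter) and collection storage quota/use endpoints. A hedged usage sketch follows; $SERVER_URL, $API_TOKEN (sent via the standard X-Dataverse-key header) and the collection alias "root" are placeholders assumed for illustration, not part of this PR:

    # placeholders: $SERVER_URL, $API_TOKEN, and the "root" alias are assumptions for this sketch
    # change the name of the collection "root" (value is a query parameter, not the request body)
    curl -X PUT -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/attribute/name?value=Root"

    # allocate a 1 GB storage quota for the collection, then inspect the allocation and the recorded use
    curl -X POST -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/storage/quota/1000000000"
    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/storage/quota"
    curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/storage/use"

    # remove the quota again
    curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/dataverses/root/storage/quota"
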
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
index c9eb3638b90..e9f869ad8b9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
@@ -14,9 +14,9 @@
 import java.util.logging.Logger;
 
 import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService;
-import javax.faces.context.FacesContext;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
+import jakarta.faces.context.FacesContext;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java
index 2410da04072..bcb8799ec9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java
@@ -12,16 +12,17 @@
 import java.io.OutputStream;
 import java.io.IOException;
 
-import javax.ws.rs.WebApplicationException;
+import jakarta.ws.rs.WebApplicationException;
 
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.Response;
 
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
+import jakarta.ws.rs.ext.MessageBodyWriter;
+import jakarta.ws.rs.ext.Provider;
 
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.dataaccess.*;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.engine.command.Command;
@@ -43,12 +44,12 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.inject.Inject;
-import javax.ws.rs.ClientErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.RedirectionException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.core.HttpHeaders;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.ClientErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.RedirectionException;
+import jakarta.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.core.HttpHeaders;
 import org.apache.tika.mime.MimeType;
 import org.apache.tika.mime.MimeTypeException;
 import org.apache.tika.mime.MimeTypes;
@@ -206,14 +207,15 @@ public void writeTo(DownloadInstance di, Class<?> clazz, Type type, Annotation[]
                         redirect_url_str = null;
                     }
                 }
-                
-                if (systemConfig.isGlobusFileDownload() && systemConfig.getGlobusStoresList()
-                        .contains(DataAccess.getStorageDriverFromIdentifier(dataFile.getStorageIdentifier()))) {
+                String driverId = DataAccess.getStorageDriverFromIdentifier(dataFile.getStorageIdentifier());
+                if (systemConfig.isGlobusFileDownload() && (GlobusAccessibleStore.acceptsGlobusTransfers(driverId) || GlobusAccessibleStore.allowsGlobusReferences(driverId))) {
                     if (di.getConversionParam() != null) {
                         if (di.getConversionParam().equals("format")) {
 
                             if ("GlobusTransfer".equals(di.getConversionParamValue())) {
-                                redirect_url_str = globusService.getGlobusAppUrlForDataset(dataFile.getOwner(), false, dataFile);
+                                List<DataFile> downloadDFList = new ArrayList<DataFile>(1);
+                                downloadDFList.add(dataFile);
+                                redirect_url_str = globusService.getGlobusAppUrlForDataset(dataFile.getOwner(), false, downloadDFList);
                             }
                         }
                     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java
index 82938fd3687..1b74ab5479e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/EditDDI.java
@@ -1,9 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
@@ -27,27 +26,25 @@
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadataDDIParser;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
-import org.apache.solr.client.solrj.SolrServerException;
-
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.Path;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.PathParam;
+
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.PathParam;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLStreamReader;
-import java.io.IOException;
 import java.io.InputStream;
 
-import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.List;
@@ -55,11 +52,9 @@
 import java.util.Map;
 import java.util.HashMap;
 import java.util.Collection;
-import java.util.Date;
-import java.sql.Timestamp;
 
 
-import javax.validation.ConstraintViolationException;
+import jakarta.validation.ConstraintViolationException;
 
 @Stateless
 @Path("edit")
@@ -95,9 +90,10 @@ public class EditDDI  extends AbstractApiBean {
 
 
     @PUT
-    @Consumes("application/xml")
+    @AuthRequired
     @Path("{fileId}")
-    public Response edit (InputStream body, @PathParam("fileId") String fileId) {
+    @Consumes("application/xml")
+    public Response edit(@Context ContainerRequestContext crc, InputStream body, @PathParam("fileId") String fileId) {
         DataFile dataFile = null;
         try {
             dataFile = findDataFileOrDie(fileId);
@@ -105,7 +101,7 @@ public Response edit (InputStream body, @PathParam("fileId") String fileId) {
         } catch (WrappedResponse ex) {
             return ex.getResponse();
         }
-        User apiTokenUser = checkAuth(dataFile);
+        User apiTokenUser = checkAuth(getRequestUser(crc), dataFile);
 
         if (apiTokenUser == null) {
             return unauthorized("Cannot edit metadata, access denied" );
@@ -244,11 +240,7 @@ private boolean createNewDraftVersion(ArrayList<VariableMetadata> neededToUpdate
         }
 
         boolean doNormalSolrDocCleanUp = true;
-        try {
-            Future<String> indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp);
-        } catch (IOException | SolrServerException ex) {
-            logger.log(Level.SEVERE, "Couldn''t index dataset: " + ex.getMessage());
-        }
+        indexService.asyncIndexDataset(dataset, doNormalSolrDocCleanUp);
 
         return true;
     }
@@ -426,27 +418,10 @@ private boolean AreDefaultValues(VariableMetadata varMet) {
     }
 
 
-    private User checkAuth(DataFile dataFile) {
-
-        User apiTokenUser = null;
-
-        try {
-            apiTokenUser = findUserOrDie();
-        } catch (WrappedResponse wr) {
-            apiTokenUser = null;
-            logger.log(Level.FINE, "Message from findUserOrDie(): {0}", wr.getMessage());
+    private User checkAuth(User requestUser, DataFile dataFile) {
+        if (!permissionService.requestOn(createDataverseRequest(requestUser), dataFile.getOwner()).has(Permission.EditDataset)) {
+            return null;
         }
-
-        if (apiTokenUser != null) {
-            // used in an API context
-            if (!permissionService.requestOn(createDataverseRequest(apiTokenUser), dataFile.getOwner()).has(Permission.EditDataset)) {
-                apiTokenUser = null;
-            }
-        }
-
-        return apiTokenUser;
-
+        return requestUser;
     }
 }
-
-
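With the change above, the DDI edit endpoint authenticates through @AuthRequired and only proceeds if the request user holds the EditDataset permission on the file's dataset. A minimal invocation sketch, assuming placeholders $SERVER_URL, $API_TOKEN (X-Dataverse-key header) and a file ID in $FILE_ID, with the updated variable-level DDI metadata stored in a local file named updated-ddi.xml:

    # placeholders above are assumptions for this sketch, not part of this PR
    curl -X PUT -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type: application/xml" \
         --upload-file updated-ddi.xml "$SERVER_URL/api/edit/$FILE_ID"
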
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
index e53b54482b8..1feac1141bb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/ExternalTools.java
@@ -4,15 +4,15 @@
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolServiceBean;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 @Path("admin/externalTools")
 public class ExternalTools extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java
index d9a94ee340b..8a178f8da62 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/FeedbackApi.java
@@ -2,49 +2,66 @@
 
 import edu.harvard.iq.dataverse.DataverseSession;
 import edu.harvard.iq.dataverse.DvObject;
-import edu.harvard.iq.dataverse.DvObjectServiceBean;
+import edu.harvard.iq.dataverse.MailServiceBean;
+import edu.harvard.iq.dataverse.SendFeedbackDialog;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.feedback.Feedback;
 import edu.harvard.iq.dataverse.feedback.FeedbackUtil;
-import java.util.List;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.MailUtil;
+
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 
 @Path("admin/feedback")
 public class FeedbackApi extends AbstractApiBean {
 
-    @EJB
-    DvObjectServiceBean dvObjectSvc;
-
+    @EJB MailServiceBean mailService;
+    
+    /**
+     * This method mimics the contact form and sends an email to the contacts of the
+     * specified Collection/Dataset/DataFile, optionally ccing the support email
+     * address, or to the support email address when there is no target object.
+     * 
+     * !!!!! This should not be moved outside the /admin path unless/until some form
+     * of captcha or other spam-prevention mechanism is added. As is, it allows an
+     * unauthenticated user (with access to the /admin api path) to send email from
+     * anyone to any contacts in Dataverse. (It also does not do much to validate
+     * user input, e.g. to strip potentially malicious HTML, etc.) !!!!
+     **/
     @POST
     public Response submitFeedback(JsonObject jsonObject) throws AddressException {
-        JsonNumber jsonNumber = jsonObject.getJsonNumber("id");
-        DvObject recipient = null;
+        JsonNumber jsonNumber = jsonObject.getJsonNumber("targetId");
+        DvObject feedbackTarget = null;
         if (jsonNumber != null) {
-            recipient = dvObjectSvc.findDvObject(jsonNumber.longValue());
+            feedbackTarget =  dvObjSvc.findDvObject(jsonNumber.longValue());
+            if(feedbackTarget==null) {
+                return error(Status.BAD_REQUEST, "Feedback target object not found");
+            }
         }
         DataverseSession dataverseSession = null;
         String userMessage = jsonObject.getString("body");
-        String systemEmail = "support@librascholar.edu";
-        InternetAddress systemAddress = new InternetAddress(systemEmail);
+        String systemEmail = JvmSettings.SUPPORT_EMAIL.lookupOptional().orElse(settingsSvc.getValueForKey(SettingsServiceBean.Key.SystemEmail));
+        InternetAddress systemAddress = MailUtil.parseSystemAddress(systemEmail);
         String userEmail = jsonObject.getString("fromEmail");
         String messageSubject = jsonObject.getString("subject");
         String baseUrl = systemConfig.getDataverseSiteUrl();
         String installationBrandName = BrandingUtil.getInstallationBrandName();
         String supportTeamName = BrandingUtil.getSupportTeamName(systemAddress);
         JsonArrayBuilder jab = Json.createArrayBuilder();
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(recipient, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        feedbacks.forEach((feedback) -> {
-            jab.add(feedback.toJsonObjectBuilder());
-        });
+        Feedback feedback = FeedbackUtil.gatherFeedback(feedbackTarget, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, SendFeedbackDialog.ccSupport(feedbackTarget));
+        jab.add(feedback.toJsonObjectBuilder());
+        mailService.sendMail(feedback.getFromEmail(), feedback.getToEmail(), feedback.getCcEmail(), feedback.getSubject(), feedback.getBody());
         return ok(jab);
     }
 }
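As the reworked Javadoc notes, the admin feedback endpoint now sends a single email (optionally CCing support) instead of only returning the assembled Feedback objects. A hedged request sketch matching the fields read by submitFeedback — targetId is optional, and the ID 42, the addresses, and $SERVER_URL are made-up placeholders:

    # unauthenticated POST under the /admin path, which is typically restricted in deployments; placeholders are assumptions
    curl -X POST -H "Content-Type: application/json" "$SERVER_URL/api/admin/feedback" \
         -d '{"targetId": 42, "fromEmail": "user@example.edu", "subject": "Question about a dataset", "body": "Where can I find the codebook?"}'
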
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 965d56d355e..5d400ee1438 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -3,6 +3,8 @@
 import com.google.gson.Gson;
 import com.google.gson.JsonObject;
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileServiceBean;
+import edu.harvard.iq.dataverse.DataFileTag;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetLock;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
@@ -11,9 +13,13 @@
 import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
+import edu.harvard.iq.dataverse.FileDownloadServiceBean;
 import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.GuestbookResponseServiceBean;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccessValidator;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
+import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
@@ -30,8 +36,8 @@
 import edu.harvard.iq.dataverse.engine.command.impl.RestrictFileCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UningestFileCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import edu.harvard.iq.dataverse.ingest.IngestRequest;
@@ -43,32 +49,41 @@
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.URLTokenUtil;
+
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
+
+import java.io.IOException;
 import java.io.InputStream;
+import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.json.Json;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.stream.JsonParsingException;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.*;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+
+import static edu.harvard.iq.dataverse.util.json.JsonPrinter.jsonDT;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+
+import jakarta.ws.rs.core.UriInfo;
 import org.glassfish.jersey.media.multipart.FormDataBodyPart;
 import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
 import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -96,7 +111,11 @@ public class Files extends AbstractApiBean {
     SettingsServiceBean settingsService;
     @Inject
     MakeDataCountLoggingServiceBean mdcLogService;
-    
+    @Inject
+    GuestbookResponseServiceBean guestbookResponseService;
+    @Inject
+    DataFileServiceBean dataFileServiceBean;
+
     private static final Logger logger = Logger.getLogger(Files.class.getName());
     
     
@@ -120,8 +139,9 @@ private void msgt(String m){
      * @return
      */
     @PUT
+    @AuthRequired
     @Path("{id}/restrict")
-    public Response restrictFileInDataset(@PathParam("id") String fileToRestrictId, String restrictStr) {
+    public Response restrictFileInDataset(@Context ContainerRequestContext crc, @PathParam("id") String fileToRestrictId, String restrictStr) {
         //create request
         DataverseRequest dataverseRequest = null;
         //get the datafile
@@ -133,12 +153,8 @@ public Response restrictFileInDataset(@PathParam("id") String fileToRestrictId,
         }
 
         boolean restrict = Boolean.valueOf(restrictStr);
-  
-        try {
-            dataverseRequest = createDataverseRequest(findUserOrDie());
-        } catch (WrappedResponse wr) {
-            return error(BAD_REQUEST, "Couldn't find user to execute command: " + wr.getLocalizedMessage());
-        }
+
+        dataverseRequest = createDataverseRequest(getRequestUser(crc));
 
         // try to restrict the datafile
         try {
@@ -177,9 +193,11 @@ public Response restrictFileInDataset(@PathParam("id") String fileToRestrictId,
      * @return 
      */
     @POST
+    @AuthRequired
     @Path("{id}/replace")
     @Consumes(MediaType.MULTIPART_FORM_DATA)
     public Response replaceFileInDataset(
+                    @Context ContainerRequestContext crc,
                     @PathParam("id") String fileIdOrPersistentId,
                     @FormDataParam("jsonData") String jsonData,
                     @FormDataParam("file") InputStream testFileInputStream,
@@ -190,15 +208,8 @@ public Response replaceFileInDataset(
         if (!systemConfig.isHTTPUpload()) {
             return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
         }
-        // (1) Get the user from the API key
-        User authUser;
-        try {
-            authUser = findUserOrDie();
-        } catch (AbstractApiBean.WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN, 
-                    BundleUtil.getStringFromBundle("file.addreplace.error.auth")
-                    );
-        }
+        // (1) Get the user from the ContainerRequestContext
+        User authUser = getRequestUser(crc);
 
         // (2) Check/Parse the JSON (if uploaded)  
         Boolean forceReplace = false;
@@ -317,12 +328,61 @@ public Response replaceFileInDataset(
         }
             
     } // end: replaceFileInDataset
+
+    /**
+     * Delete an existing file from a dataset.
+     *
+     * @param fileIdOrPersistentId the file database ID or persistent ID
+     */
+    @DELETE
+    @AuthRequired
+    @Path("{id}")
+    public Response deleteFileInDataset(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId){
+        // (1) Get the user from the API key and create request
+        User authUser = getRequestUser(crc);
+        DataverseRequest dvRequest = createDataverseRequest(authUser);
+
+        // (2) Delete
+        boolean deletePhysicalFile = false;
+        try {
+            DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId);
+            FileMetadata fileToDelete = dataFile.getLatestFileMetadata();
+            Dataset dataset = dataFile.getOwner();
+            DatasetVersion v = dataset.getOrCreateEditVersion();
+            deletePhysicalFile = !dataFile.isReleased();
+
+            UpdateDatasetVersionCommand update_cmd = new UpdateDatasetVersionCommand(dataset, dvRequest,  Arrays.asList(fileToDelete), v);
+            update_cmd.setValidateLenient(true);
+
+            try {
+                commandEngine.submit(update_cmd);
+            } catch (CommandException ex) {
+                return error(BAD_REQUEST, "Delete failed for file ID " + fileIdOrPersistentId + " (CommandException): " + ex.getMessage());
+            } catch (EJBException ex) {
+                return error(BAD_REQUEST, "Delete failed for file ID " + fileIdOrPersistentId + " (EJBException): " + ex.getMessage());
+            }
+    
+            if (deletePhysicalFile) {
+                try {
+                    fileService.finalizeFileDelete(dataFile.getId(), fileService.getPhysicalFileToDelete(dataFile));
+                } catch (IOException ioex) {
+                    logger.warning("Failed to delete the physical file associated with the deleted datafile id="
+                            + dataFile.getId() + ", storage location: " + fileService.getPhysicalFileToDelete(dataFile));
+                }
+            }
+        } catch (WrappedResponse wr) {
+            return wr.getResponse();
+        }
+
+        return ok(deletePhysicalFile);
+    }
     
     //Much of this code is taken from the replace command, 
     //simplified as we aren't actually switching files
     @POST
+    @AuthRequired
     @Path("{id}/metadata")
-    public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData,
+    public Response updateFileMetadata(@Context ContainerRequestContext crc, @FormDataParam("jsonData") String jsonData,
                     @PathParam("id") String fileIdOrPersistentId
         ) throws DataFileTagException, CommandException {
         
@@ -331,7 +391,7 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData,
         try {
             DataverseRequest req;
             try {
-                req = createDataverseRequest(findUserOrDie());
+                req = createDataverseRequest(getRequestUser(crc));
             } catch (Exception e) {
                 return error(BAD_REQUEST, "Error attempting to request information. Maybe a bad API token?");
             }
@@ -342,8 +402,6 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData,
                 return error(BAD_REQUEST, "Error attempting get the requested data file.");
             }
 
-            
-            User authUser = findUserOrDie();
 
             //You shouldn't be trying to edit a datafile that has been replaced
             List<Long> result = em.createNamedQuery("DataFile.findDataFileThatReplacedId", Long.class)
@@ -403,7 +461,7 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData,
                     return error(Response.Status.BAD_REQUEST, "An error has occurred attempting to update the requested DataFile. It is not part of the current version of the Dataset.");
                 }
 
-                javax.json.JsonObject jsonObject = JsonUtil.getJsonObject(jsonData);
+                jakarta.json.JsonObject jsonObject = JsonUtil.getJsonObject(jsonData);
                 String incomingLabel = jsonObject.getString("label", null);
                 String incomingDirectoryLabel = jsonObject.getString("directoryLabel", null);
                 String existingLabel = df.getFileMetadata().getLabel();
@@ -443,23 +501,25 @@ public Response updateFileMetadata(@FormDataParam("jsonData") String jsonData,
                 .build();
     }
     
-    @GET                    
+    @GET
+    @AuthRequired
     @Path("{id}/draft")
-    public Response getFileDataDraft(@PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception {
-        return getFileDataResponse(fileIdOrPersistentId, uriInfo, headers, response, true);
+    public Response getFileDataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception {
+        return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, true);
     }
     
-    @GET                             
+    @GET
+    @AuthRequired
     @Path("{id}")
-    public Response getFileData(@PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception {
-          return getFileDataResponse(fileIdOrPersistentId, uriInfo, headers, response, false);
+    public Response getFileData(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws WrappedResponse, Exception {
+          return getFileDataResponse(getRequestUser(crc), fileIdOrPersistentId, uriInfo, headers, response, false);
     }
     
-    private Response getFileDataResponse(String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response, boolean draft ){
+    private Response getFileDataResponse(User user, String fileIdOrPersistentId, UriInfo uriInfo, HttpHeaders headers, HttpServletResponse response, boolean draft ){
         
         DataverseRequest req;
         try {
-            req = createDataverseRequest(findUserOrDie());
+            req = createDataverseRequest(user);
         } catch (Exception e) {
             return error(BAD_REQUEST, "Error attempting to request information. Maybe a bad API token?");
         }
@@ -507,19 +567,20 @@ private Response getFileDataResponse(String fileIdOrPersistentId, UriInfo uriInf
         } 
         
         return Response.ok(Json.createObjectBuilder()
-                .add("status", STATUS_OK)
+                .add("status", ApiConstants.STATUS_OK)
                 .add("data", json(fm)).build())
                 .type(MediaType.APPLICATION_JSON)
                 .build();
     }
     
-    @GET                             
+    @GET
+    @AuthRequired
     @Path("{id}/metadata")
-    public Response getFileMetadata(@PathParam("id") String fileIdOrPersistentId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, Boolean getDraft) throws WrappedResponse, Exception {
+    public Response getFileMetadata(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, Boolean getDraft) throws WrappedResponse, Exception {
         //ToDo - versionId is not used - can't get metadata for earlier versions
         DataverseRequest req;
             try {
-                req = createDataverseRequest(findUserOrDie());
+                req = createDataverseRequest(getRequestUser(crc));
             } catch (Exception e) {
                 return error(BAD_REQUEST, "Error attempting to request information. Maybe a bad API token?");
             }
@@ -555,15 +616,17 @@ public Response getFileMetadata(@PathParam("id") String fileIdOrPersistentId, @P
                 .build();
     }
     
-    @GET                    
+    @GET
+    @AuthRequired
     @Path("{id}/metadata/draft")
-    public Response getFileMetadataDraft(@PathParam("id") String fileIdOrPersistentId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, Boolean getDraft) throws WrappedResponse, Exception {
-        return getFileMetadata(fileIdOrPersistentId, versionId, uriInfo, headers, response, true);
+    public Response getFileMetadataDraft(@Context ContainerRequestContext crc, @PathParam("id") String fileIdOrPersistentId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response, Boolean getDraft) throws WrappedResponse, Exception {
+        return getFileMetadata(crc, fileIdOrPersistentId, versionId, uriInfo, headers, response, true);
     }
 
-    @Path("{id}/uningest")
     @POST
-    public Response uningestDatafile(@PathParam("id") String id) {
+    @AuthRequired
+    @Path("{id}/uningest")
+    public Response uningestDatafile(@Context ContainerRequestContext crc, @PathParam("id") String id) {
 
         DataFile dataFile;
         try {
@@ -580,7 +643,7 @@ public Response uningestDatafile(@PathParam("id") String id) {
         }
 
         try {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             execCommand(new UningestFileCommand(req, dataFile));
             Long dataFileId = dataFile.getId();
             dataFile = fileService.find(dataFileId);
@@ -592,22 +655,22 @@ public Response uningestDatafile(@PathParam("id") String id) {
         }
 
     }
-    
+
     // reingest attempts to queue an *existing* DataFile 
     // for tabular ingest. It can be used on non-tabular datafiles; to try to 
     // ingest a file that has previously failed ingest, or to ingest a file of a
     // type for which ingest was not previously supported. 
     // We are considering making it possible, in the future, to reingest 
     // a datafile that's already ingested as Tabular; for example, to address a 
-    // bug that has been found in an ingest plugin. 
-    
-    @Path("{id}/reingest")
+    // bug that has been found in an ingest plugin.
     @POST
-    public Response reingest(@PathParam("id") String id) {
+    @AuthRequired
+    @Path("{id}/reingest")
+    public Response reingest(@Context ContainerRequestContext crc, @PathParam("id") String id) {
 
         AuthenticatedUser u;
         try {
-            u = findAuthenticatedUserOrDie();
+            u = getRequestAuthenticatedUserOrDie(crc);
             if (!u.isSuperuser()) {
                 return error(Response.Status.FORBIDDEN, "This API call can be used by superusers only");
             }
@@ -654,7 +717,7 @@ public Response reingest(@PathParam("id") String id) {
         dataFile = fileService.save(dataFile);
         
         // queue the data ingest job for asynchronous execution: 
-        String status = ingestService.startIngestJobs(new ArrayList<>(Arrays.asList(dataFile)), u);
+        String status = ingestService.startIngestJobs(dataset.getId(), new ArrayList<>(Arrays.asList(dataFile)), u);
         
         if (!StringUtil.isEmpty(status)) {
             // This most likely indicates some sort of a problem (for example, 
@@ -670,13 +733,19 @@ public Response reingest(@PathParam("id") String id) {
 
     }
 
-    @Path("{id}/redetect")
     @POST
-    public Response redetectDatafile(@PathParam("id") String id, @QueryParam("dryRun") boolean dryRun) {
+    @AuthRequired
+    @Path("{id}/redetect")
+    public Response redetectDatafile(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("dryRun") boolean dryRun) {
         try {
             DataFile dataFileIn = findDataFileOrDie(id);
+            // Ingested Files have mimetype = text/tab-separated-values
+            // No need to redetect
+            if (dataFileIn.isTabularData()) {
+                return error(Response.Status.BAD_REQUEST, "The file is an ingested tabular file.");
+            }
             String originalContentType = dataFileIn.getContentType();
-            DataFile dataFileOut = execCommand(new RedetectFileTypeCommand(createDataverseRequest(findUserOrDie()), dataFileIn, dryRun));
+            DataFile dataFileOut = execCommand(new RedetectFileTypeCommand(createDataverseRequest(getRequestUser(crc)), dataFileIn, dryRun));
             NullSafeJsonBuilder result = NullSafeJsonBuilder.jsonObjectBuilder()
                     .add("dryRun", dryRun)
                     .add("oldContentType", originalContentType)
@@ -687,11 +756,12 @@ public Response redetectDatafile(@PathParam("id") String id, @QueryParam("dryRun
         }
     }
 
-    @Path("{id}/extractNcml")
     @POST
-    public Response extractNcml(@PathParam("id") String id) {
+    @AuthRequired
+    @Path("{id}/extractNcml")
+    public Response extractNcml(@Context ContainerRequestContext crc, @PathParam("id") String id) {
         try {
-            AuthenticatedUser au = findAuthenticatedUserOrDie();
+            AuthenticatedUser au = getRequestAuthenticatedUserOrDie(crc);
             if (!au.isSuperuser()) {
                 // We can always make a command in the future if there's a need
                 // for non-superusers to call this API.
@@ -734,33 +804,131 @@ private void exportDatasetMetadata(SettingsServiceBean settingsServiceBean, Data
     // This supports the cases where a tool is accessing a restricted resource (e.g.
     // preview of a draft file), or public case.
     @GET
+    @AuthRequired
     @Path("{id}/metadata/{fmid}/toolparams/{tid}")
-    public Response getExternalToolFMParams(@PathParam("tid") long externalToolId,
+    public Response getExternalToolFMParams(@Context ContainerRequestContext crc, @PathParam("tid") long externalToolId,
             @PathParam("id") String fileId, @PathParam("fmid") long fmid, @QueryParam(value = "locale") String locale) {
+        ExternalTool externalTool = externalToolService.findById(externalToolId);
+        if(externalTool == null) {
+            return error(BAD_REQUEST, "External tool not found.");
+        }
+        if (!ExternalTool.Scope.FILE.equals(externalTool.getScope())) {
+            return error(BAD_REQUEST, "External tool does not have file scope.");
+        }
+        ApiToken apiToken = null;
+        User user = getRequestUser(crc);
+        apiToken = authSvc.getValidApiTokenForUser(user);
+        FileMetadata target = fileSvc.findFileMetadata(fmid);
+        if (target == null) {
+            return error(BAD_REQUEST, "FileMetadata not found.");
+        }
+
+        URLTokenUtil eth = null;
+
+        eth = new ExternalToolHandler(externalTool, target.getDataFile(), apiToken, target, locale);
+        return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters())), JsonUtil.getJsonArray(externalTool.getAllowedApiCalls())));
+    }
+    
+    @GET
+    @Path("fixityAlgorithm")
+    public Response getFixityAlgorithm() {
+        return ok(systemConfig.getFileFixityChecksumAlgorithm().toString());
+    }
+
+    @GET
+    @AuthRequired
+    @Path("{id}/downloadCount")
+    public Response getFileDownloadCount(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+        return response(req -> {
+            DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId)));
+            return ok(guestbookResponseService.getDownloadCountByDataFileId(dataFile.getId()).toString());
+        }, getRequestUser(crc));
+    }
+
+    @GET
+    @AuthRequired
+    @Path("{id}/dataTables")
+    public Response getFileDataTables(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+        DataFile dataFile;
         try {
-            ExternalTool externalTool = externalToolService.findById(externalToolId);
-            if(externalTool == null) {
-                return error(BAD_REQUEST, "External tool not found.");
+            dataFile = findDataFileOrDie(dataFileId);
+        } catch (WrappedResponse e) {
+            return notFound("File not found for given id.");
+        }
+        if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) {
+            DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc));
+            boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile);
+            if (!hasPermissionToDownloadFile) {
+                return forbidden("Insufficient permissions to access the requested information.");
             }
-            if (!ExternalTool.Scope.FILE.equals(externalTool.getScope())) {
-                return error(BAD_REQUEST, "External tool does not have file scope.");
+        }
+        if (!dataFile.isTabularData()) {
+            return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported"));
+        }
+        return ok(jsonDT(dataFile.getDataTables()));
+    }
+
+    @POST
+    @AuthRequired
+    @Path("{id}/metadata/categories")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response setFileCategories(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) {
+        return response(req -> {
+            DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId)));
+            jakarta.json.JsonObject jsonObject;
+            try (StringReader stringReader = new StringReader(jsonBody)) {
+                jsonObject = Json.createReader(stringReader).readObject();
+                JsonArray requestedCategoriesJson = jsonObject.getJsonArray("categories");
+                FileMetadata fileMetadata = dataFile.getFileMetadata();
+                for (JsonValue jsonValue : requestedCategoriesJson) {
+                    JsonString jsonString = (JsonString) jsonValue;
+                    fileMetadata.addCategoryByName(jsonString.getString());
+                }
+                execCommand(new UpdateDatasetVersionCommand(fileMetadata.getDataFile().getOwner(), req));
+                return ok("Categories of file " + dataFileId + " updated.");
+            } catch (JsonParsingException jpe) {
+                return badRequest("Error parsing Json: " + jpe.getMessage());
             }
-            ApiToken apiToken = null;
-            User u = findUserOrDie();
-            if (u instanceof AuthenticatedUser) {
-                apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) u);
+        }, getRequestUser(crc));
+    }
+
+    @POST
+    @AuthRequired
+    @Path("{id}/metadata/tabularTags")
+    @Produces(MediaType.APPLICATION_JSON)
+    public Response setFileTabularTags(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) {
+        return response(req -> {
+            DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId)));
+            if (!dataFile.isTabularData()) {
+                return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported"));
             }
-            FileMetadata target = fileSvc.findFileMetadata(fmid);
-            if (target == null) {
-                return error(BAD_REQUEST, "FileMetadata not found.");
+            jakarta.json.JsonObject jsonObject;
+            try (StringReader stringReader = new StringReader(jsonBody)) {
+                jsonObject = Json.createReader(stringReader).readObject();
+                JsonArray requestedTabularTagsJson = jsonObject.getJsonArray("tabularTags");
+                for (JsonValue jsonValue : requestedTabularTagsJson) {
+                    JsonString jsonString = (JsonString) jsonValue;
+                    try {
+                        dataFile.addUniqueTagByLabel(jsonString.getString());
+                    } catch (IllegalArgumentException iax){
+                        return badRequest(iax.getMessage());
+                    }
+                }
+                execCommand(new UpdateDatasetVersionCommand(dataFile.getOwner(), req));
+                return ok("Tabular tags of file " + dataFileId + " updated.");
+            } catch (JsonParsingException jpe) {
+                return badRequest("Error parsing Json: " + jpe.getMessage());
             }
+        }, getRequestUser(crc));
+    }
 
-            ExternalToolHandler eth = null;
-
-            eth = new ExternalToolHandler(externalTool, target.getDataFile(), apiToken, target, locale);
-            return ok(eth.createPostBody(eth.getParams(JsonUtil.getJsonObject(externalTool.getToolParameters()))));
-        } catch (WrappedResponse wr) {
-            return wr.getResponse();
-        }
+    @GET
+    @AuthRequired
+    @Path("{id}/hasBeenDeleted")
+    public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) {
+        return response(req -> {
+            DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId)));
+            return ok(dataFileServiceBean.hasBeenDeleted(dataFile));
+        }, getRequestUser(crc));
     }
 }
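
A minimal client-side sketch of the new file deletion endpoint introduced above, shown here for orientation only (it is not part of the patch). It assumes a Dataverse instance at localhost:8080, a placeholder API token, and the conventional /api/files prefix for the Files resource, none of which appear in this hunk.

// Illustrative sketch only -- not part of the patch. Host, id, and token are placeholders.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeleteFileExample {
    public static void main(String[] args) throws Exception {
        String host = "http://localhost:8080";       // assumed local Dataverse
        String fileId = "42";                        // hypothetical file database id
        String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx"; // placeholder API token

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(host + "/api/files/" + fileId))
                .header("X-Dataverse-key", apiToken)
                .DELETE()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // The endpoint responds with ok(deletePhysicalFile): true when the file had
        // never been released and its physical copy was removed as well.
        System.out.println(response.statusCode() + " " + response.body());
    }
}
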
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
index 5a587efadf3..d56a787c7ff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
@@ -9,26 +9,26 @@
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 import edu.harvard.iq.dataverse.util.json.JsonParser;
 
-import javax.ejb.Stateless;
-import javax.interceptor.Interceptors;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Stateless;
+import jakarta.interceptor.Interceptors;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.annotation.PostConstruct;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonString;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.PathParam;
+import jakarta.annotation.PostConstruct;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.PathParam;
 import static org.apache.commons.lang3.StringUtils.isNumeric;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
index 9aea3adab8b..dfc9f48dd1a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -16,25 +16,28 @@
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
-import javax.json.JsonObjectBuilder;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.json.JsonObjectBuilder;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Path("harvest/clients")
 public class HarvestingClients extends AbstractApiBean {
@@ -55,8 +58,9 @@ public class HarvestingClients extends AbstractApiBean {
      *  optionally, plain text output may be provided as well.
      */
     @GET
+    @AuthRequired
     @Path("/")
-    public Response harvestingClients(@QueryParam("key") String apiKey) throws IOException {
+    public Response harvestingClients(@Context ContainerRequestContext crc, @QueryParam("key") String apiKey) throws IOException {
         
         List<HarvestingClient> harvestingClients = null; 
         try {
@@ -80,7 +84,7 @@ public Response harvestingClients(@QueryParam("key") String apiKey) throws IOExc
             // the permission to view this harvesting client config. -- L.A. 4.4
             HarvestingClient retrievedHarvestingClient = null; 
             try {
-                DataverseRequest req = createDataverseRequest(findUserOrDie());
+                DataverseRequest req = createDataverseRequest(getRequestUser(crc));
                 retrievedHarvestingClient = execCommand( new GetHarvestingClientCommand(req, harvestingClient));
             } catch (Exception ex) {
                 // Don't do anything. 
@@ -97,8 +101,9 @@ public Response harvestingClients(@QueryParam("key") String apiKey) throws IOExc
     } 
     
     @GET
+    @AuthRequired
     @Path("{nickName}")
-    public Response harvestingClient(@PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException {
+    public Response harvestingClient(@Context ContainerRequestContext crc, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException {
         
         HarvestingClient harvestingClient = null; 
         try {
@@ -122,7 +127,7 @@ public Response harvestingClient(@PathParam("nickName") String nickName, @QueryP
             // findUserOrDie() and execCommand() both throw WrappedResponse 
             // exception, that already has a proper HTTP response in it. 
             
-            retrievedHarvestingClient = execCommand(new GetHarvestingClientCommand(createDataverseRequest(findUserOrDie()), harvestingClient));
+            retrievedHarvestingClient = execCommand(new GetHarvestingClientCommand(createDataverseRequest(getRequestUser(crc)), harvestingClient));
             logger.fine("retrieved Harvesting Client " + retrievedHarvestingClient.getName() + " with the GetHarvestingClient command.");
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -146,12 +151,13 @@ public Response harvestingClient(@PathParam("nickName") String nickName, @QueryP
     }
     
     @POST
+    @AuthRequired
     @Path("{nickName}")
-    public Response createHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+    public Response createHarvestingClient(@Context ContainerRequestContext crc, String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
         // Per the discussion during the QA of PR #9174, we decided to make 
         // the create/edit APIs superuser-only (the delete API was already so)
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if ((!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                 throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can create harvesting clients."));
             }
@@ -159,8 +165,8 @@ public Response createHarvestingClient(String jsonBody, @PathParam("nickName") S
             return wr.getResponse();
         }
  
-        try ( StringReader rdr = new StringReader(jsonBody) ) {
-            JsonObject json = Json.createReader(rdr).readObject();
+        try {
+            JsonObject json = JsonUtil.getJsonObject(jsonBody);
             
             // Check that the client with this name doesn't exist yet: 
             // (we could simply let the command fail, but that does not result 
@@ -215,7 +221,7 @@ public Response createHarvestingClient(String jsonBody, @PathParam("nickName") S
             }
             ownerDataverse.getHarvestingClientConfigs().add(harvestingClient);
                         
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
             harvestingClient = execCommand(new CreateHarvestingClientCommand(req, harvestingClient));
             return created( "/harvest/clients/" + nickName, JsonPrinter.json(harvestingClient));
                     
@@ -230,10 +236,11 @@ public Response createHarvestingClient(String jsonBody, @PathParam("nickName") S
     }
     
     @PUT
+    @AuthRequired
     @Path("{nickName}")
-    public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+    public Response modifyHarvestingClient(@Context ContainerRequestContext crc, String jsonBody, @PathParam("nickName") String nickName, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if ((!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                 throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can modify harvesting clients."));
             }
@@ -255,9 +262,9 @@ public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") S
         
         String ownerDataverseAlias = harvestingClient.getDataverse().getAlias();
         
-        try ( StringReader rdr = new StringReader(jsonBody) ) {
-            DataverseRequest req = createDataverseRequest(findUserOrDie());
-            JsonObject json = Json.createReader(rdr).readObject();
+        try {
+            DataverseRequest req = createDataverseRequest(getRequestUser(crc));
+            JsonObject json = JsonUtil.getJsonObject(jsonBody);
             
             HarvestingClient newHarvestingClient = new HarvestingClient(); 
             String newDataverseAlias = jsonParser().parseHarvestingClient(json, newHarvestingClient);
@@ -309,15 +316,16 @@ public Response modifyHarvestingClient(String jsonBody, @PathParam("nickName") S
     }
     
     @DELETE
+    @AuthRequired
     @Path("{nickName}")
-    public Response deleteHarvestingClient(@PathParam("nickName") String nickName) throws IOException {
+    public Response deleteHarvestingClient(@Context ContainerRequestContext crc, @PathParam("nickName") String nickName) throws IOException {
         // Deleting a client can take a while (if there's a large amnount of 
         // harvested content associated with it). So instead of calling the command
         // directly, we will be calling an async. service bean method. 
 
         
         try {
-            User u = findUserOrDie();
+            User u = getRequestUser(crc);
             if ((!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                 throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete harvesting clients."));
             }
@@ -366,14 +374,15 @@ public Response deleteHarvestingClient(@PathParam("nickName") String nickName) t
     
     // This POST starts a new harvesting run:
     @POST
+    @AuthRequired
     @Path("{nickName}/run")
-    public Response startHarvestingJob(@PathParam("nickName") String clientNickname, @QueryParam("key") String apiKey) throws IOException {
+    public Response startHarvestingJob(@Context ContainerRequestContext crc, @PathParam("nickName") String clientNickname, @QueryParam("key") String apiKey) throws IOException {
         
         try {
             AuthenticatedUser authenticatedUser = null; 
             
             try {
-                authenticatedUser = findAuthenticatedUserOrDie();
+                authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             } catch (WrappedResponse wr) {
                 return error(Response.Status.UNAUTHORIZED, "Authentication required to use this API method");
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
index c69eab0307e..308b910c425 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingServer.java
@@ -5,32 +5,35 @@
  */
 package edu.harvard.iq.dataverse.api;
 
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.List;
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonReader;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -104,14 +107,15 @@ public Response oaiSet(@PathParam("specname") String spec, @QueryParam("key") St
      * "description":$optional_set_description,"definition":$set_search_query_string}.
      */
     @POST
+    @AuthRequired
     @Path("/add")
-    public Response createOaiSet(String jsonBody, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+    public Response createOaiSet(@Context ContainerRequestContext crc, String jsonBody, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
         /*
 	     * authorization modeled after the UI (aka HarvestingSetsPage)
          */
         AuthenticatedUser dvUser;
         try {
-            dvUser = findAuthenticatedUserOrDie();
+            dvUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -173,12 +177,13 @@ public Response createOaiSet(String jsonBody, @QueryParam("key") String apiKey)
     }
 
     @PUT
+    @AuthRequired
     @Path("{specname}")
-    public Response modifyOaiSet(String jsonBody, @PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
+    public Response modifyOaiSet(@Context ContainerRequestContext crc, String jsonBody, @PathParam("specname") String spec, @QueryParam("key") String apiKey) throws IOException, JsonParseException {
 
         AuthenticatedUser dvUser;
         try {
-            dvUser = findAuthenticatedUserOrDie();
+            dvUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
@@ -225,12 +230,13 @@ public Response modifyOaiSet(String jsonBody, @PathParam("specname") String spec
     }
     
     @DELETE
+    @AuthRequired
     @Path("{specname}")
-    public Response deleteOaiSet(@PathParam("specname") String spec, @QueryParam("key") String apiKey) {
+    public Response deleteOaiSet(@Context ContainerRequestContext crc, @PathParam("specname") String spec, @QueryParam("key") String apiKey) {
         
         AuthenticatedUser dvUser;
         try {
-            dvUser = findAuthenticatedUserOrDie();
+            dvUser = getRequestAuthenticatedUserOrDie(crc);
         } catch (WrappedResponse wr) {
             return wr.getResponse();
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java
index a7f4ee769b7..4910c460b6a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java
@@ -15,6 +15,7 @@
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.RoleAssignment;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
@@ -46,24 +47,24 @@
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 import org.apache.solr.client.solrj.SolrServerException;
 
 @Path("admin/index")
@@ -196,7 +197,7 @@ private Response indexAllOrSubset(Long numPartitionsSelected, Long partitionIdTo
                     }
                 }
             }
-            if (sb.toString().equals("javax.ejb.EJBException: Transaction aborted javax.transaction.RollbackException java.lang.IllegalStateException ")) {
+            if (sb.toString().contains("java.lang.IllegalStateException ")) {
                 return ok("indexing went as well as can be expected... got java.lang.IllegalStateException but some indexing may have happened anyway");
             } else {
                 return error(Status.INTERNAL_SERVER_ERROR, sb.toString());
@@ -240,12 +241,7 @@ public Response indexTypeById(@PathParam("type") String type, @PathParam("id") L
                 Dataset dataset = datasetService.find(id);
                 if (dataset != null) {
                     boolean doNormalSolrDocCleanUp = true;
-                    try {
-                        Future<String> indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp);
-                    } catch (IOException | SolrServerException e) {
-                        //
-                        return error(Status.BAD_REQUEST, writeFailureToLog(e.getLocalizedMessage(), dataset));
-                    }
+                    indexService.asyncIndexDataset(dataset, doNormalSolrDocCleanUp);
 
                     return ok("starting reindex of dataset " + id);
                 } else {
@@ -263,11 +259,7 @@ public Response indexTypeById(@PathParam("type") String type, @PathParam("id") L
                  * @todo How can we display the result to the user?
                  */
                 boolean doNormalSolrDocCleanUp = true;
-                try {
-                    Future<String> indexDatasetFuture = indexService.indexDataset(datasetThatOwnsTheFile, doNormalSolrDocCleanUp);
-                } catch (IOException | SolrServerException e) {
-                    writeFailureToLog(e.getLocalizedMessage(), datasetThatOwnsTheFile);
-                }
+                indexService.asyncIndexDataset(datasetThatOwnsTheFile, doNormalSolrDocCleanUp);
                 
                 return ok("started reindexing " + type + "/" + id);
             } else {
@@ -315,15 +307,11 @@ public Response indexDatasetByPersistentId(@QueryParam("persistentId") String pe
         }
         if (dataset != null) {
             boolean doNormalSolrDocCleanUp = true;
-            try {
-                Future<String> indexDatasetFuture = indexService.indexDataset(dataset, doNormalSolrDocCleanUp);
-            } catch (IOException | SolrServerException e) {
-                writeFailureToLog(e.getLocalizedMessage(), dataset);               
-            }
+            indexService.asyncIndexDataset(dataset, doNormalSolrDocCleanUp);
             JsonObjectBuilder data = Json.createObjectBuilder();
             data.add("message", "Reindexed dataset " + persistentId);
             data.add("id", dataset.getId());
-            data.add("persistentId", dataset.getGlobalIdString());
+            data.add("persistentId", dataset.getGlobalId().asString());
             JsonArrayBuilder versions = Json.createArrayBuilder();
             for (DatasetVersion version : dataset.getVersions()) {
                 JsonObjectBuilder versionObject = Json.createObjectBuilder();
@@ -636,15 +624,16 @@ public Response deleteTimestamp(@PathParam("dvObjectId") long dvObjectId) {
     }
 
     @GET
+    @AuthRequired
     @Path("filesearch")
-    public Response filesearch(@QueryParam("persistentId") String persistentId, @QueryParam("semanticVersion") String semanticVersion, @QueryParam("q") String userSuppliedQuery) {
+    public Response filesearch(@Context ContainerRequestContext crc, @QueryParam("persistentId") String persistentId, @QueryParam("semanticVersion") String semanticVersion, @QueryParam("q") String userSuppliedQuery) {
         Dataset dataset = datasetService.findByGlobalId(persistentId);
         if (dataset == null) {
             return error(Status.BAD_REQUEST, "Could not find dataset with persistent id " + persistentId);
         }
         User user = GuestUser.get();
         try {
-            AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
+            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (authenticatedUser != null) {
                 user = authenticatedUser;
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
index fd7824c15cf..40ce6cd25b7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
@@ -3,12 +3,12 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonValue;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
 
 @Path("info")
 public class Info extends AbstractApiBean {
@@ -22,14 +22,15 @@ public class Info extends AbstractApiBean {
     @GET
     @Path("settings/:DatasetPublishPopupCustomText")
     public Response getDatasetPublishPopupCustomText() {
-        String setting = settingsService.getValueForKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText);
-        if (setting != null) {
-            return ok(Json.createObjectBuilder().add("message", setting));
-        } else {
-            return notFound("Setting " + SettingsServiceBean.Key.DatasetPublishPopupCustomText + " not found");
-        }
+        return getSettingResponseByKey(SettingsServiceBean.Key.DatasetPublishPopupCustomText);
     }
-    
+
+    @GET
+    @Path("settings/:MaxEmbargoDurationInMonths")
+    public Response getMaxEmbargoDurationInMonths() {
+        return getSettingResponseByKey(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+    }
+
     @GET
     @Path("version")
     public Response getInfo() {
@@ -37,20 +38,42 @@ public Response getInfo() {
         String[] comps = versionStr.split("build",2);
         String version = comps[0].trim();
         JsonValue build = comps.length > 1 ? Json.createArrayBuilder().add(comps[1].trim()).build().get(0) : JsonValue.NULL;
-        
-        return response( req -> ok( Json.createObjectBuilder().add("version", version)
-                                                              .add("build", build)));
+        return ok(Json.createObjectBuilder()
+                .add("version", version)
+                .add("build", build));
     }
-    
+
     @GET
     @Path("server")
     public Response getServer() {
-        return response( req -> ok(JvmSettings.FQDN.lookup()));
+        return ok(JvmSettings.FQDN.lookup());
     }
-    
+
     @GET
     @Path("apiTermsOfUse")
     public Response getTermsOfUse() {
-        return response( req -> ok(systemConfig.getApiTermsOfUse()));
+        return ok(systemConfig.getApiTermsOfUse());
+    }
+
+    @GET
+    @Path("settings/incompleteMetadataViaApi")
+    public Response getAllowsIncompleteMetadata() {
+        return ok(JvmSettings.API_ALLOW_INCOMPLETE_METADATA.lookupOptional(Boolean.class).orElse(false));
+    }
+
+    @GET
+    @Path("zipDownloadLimit")
+    public Response getZipDownloadLimit() {
+        long zipDownloadLimit = SystemConfig.getLongLimitFromStringOrDefault(settingsSvc.getValueForKey(SettingsServiceBean.Key.ZipDownloadLimit), SystemConfig.defaultZipDownloadLimit);
+        return ok(zipDownloadLimit);
+    }
+
+    private Response getSettingResponseByKey(SettingsServiceBean.Key key) {
+        String setting = settingsService.getValueForKey(key);
+        if (setting != null) {
+            return ok(Json.createObjectBuilder().add("message", setting));
+        } else {
+            return notFound("Setting " + key + " not found");
+        }
     }
 }
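
As a quick sanity check of the Info endpoints added above, a small sketch that queries the two new anonymous GET routes. The /api/info prefix and the localhost host are assumptions for illustration; the embargo route returns 404 when the setting is not configured.

// Illustrative sketch only -- not part of the patch.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class InfoEndpointsExample {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String host = "http://localhost:8080"; // assumed local Dataverse

        // Both routes are unauthenticated and return the usual {"status", "data"} envelope.
        String[] paths = {
                "/api/info/zipDownloadLimit",
                "/api/info/settings/:MaxEmbargoDurationInMonths"
        };
        for (String path : paths) {
            HttpRequest req = HttpRequest.newBuilder()
                    .uri(URI.create(host + path))
                    .GET()
                    .build();
            HttpResponse<String> resp = client.send(req, HttpResponse.BodyHandlers.ofString());
            System.out.println(path + " -> " + resp.statusCode() + " " + resp.body());
        }
    }
}
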
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java
index 3912b9102e2..05d12f1083c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/LDNInbox.java
@@ -1,9 +1,12 @@
 package edu.harvard.iq.dataverse.api;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
 import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.MailServiceBean;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.RoleAssignment;
@@ -25,20 +28,20 @@
 import java.sql.Timestamp;
 import java.util.logging.Logger;
 
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.json.JsonWriter;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.ForbiddenException;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonWriter;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.ForbiddenException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 @Path("inbox")
 public class LDNInbox extends AbstractApiBean {
@@ -131,13 +134,13 @@ public Response acceptMessage(String body) {
                                     .getString("@id");
                             if (citedResource.getString("@type").equals(JsonLDTerm.schemaOrg("Dataset").getUrl())) {
                                 logger.fine("Raw PID: " + pid);
-                                if (pid.startsWith(GlobalId.DOI_RESOLVER_URL)) {
-                                    pid = pid.replace(GlobalId.DOI_RESOLVER_URL, GlobalId.DOI_PROTOCOL + ":");
-                                } else if (pid.startsWith(GlobalId.HDL_RESOLVER_URL)) {
-                                    pid = pid.replace(GlobalId.HDL_RESOLVER_URL, GlobalId.HDL_PROTOCOL + ":");
+                                if (pid.startsWith(DOIServiceBean.DOI_RESOLVER_URL)) {
+                                    pid = pid.replace(DOIServiceBean.DOI_RESOLVER_URL, DOIServiceBean.DOI_PROTOCOL + ":");
+                                } else if (pid.startsWith(HandlenetServiceBean.HDL_RESOLVER_URL)) {
+                                    pid = pid.replace(HandlenetServiceBean.HDL_RESOLVER_URL, HandlenetServiceBean.HDL_PROTOCOL + ":");
                                 }
                                 logger.fine("Protocol PID: " + pid);
-                                Optional<GlobalId> id = GlobalId.parse(pid);
+                                Optional<GlobalId> id = GlobalIdServiceBean.parse(pid);
                                 Dataset dataset = datasetSvc.findByGlobalId(pid);
                                 if (dataset != null) {
                                     JsonObject citingResource = Json.createObjectBuilder().add("@id", citingPID)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java
index 1fdf7818cfb..ab50ebbf2e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Licenses.java
@@ -2,19 +2,22 @@
 
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.core.Response.Status;
 
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
@@ -51,11 +54,12 @@ public Response getLicenseById(@PathParam("id") long id) {
     }
 
     @POST
+    @AuthRequired
     @Path("/")
-    public Response addLicense(License license) {
+    public Response addLicense(@Context ContainerRequestContext crc, License license) {
         User authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (!authenticatedUser.isSuperuser()) {
                 return error(Status.FORBIDDEN, "must be superuser");
             }
@@ -86,11 +90,12 @@ public Response getDefault() {
     }
 
     @PUT
+    @AuthRequired
     @Path("/default/{id}")
-    public Response setDefault(@PathParam("id") long id) {
+    public Response setDefault(@Context ContainerRequestContext crc, @PathParam("id") long id) {
         User authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (!authenticatedUser.isSuperuser()) {
                 return error(Status.FORBIDDEN, "must be superuser");
             }
@@ -117,11 +122,12 @@ public Response setDefault(@PathParam("id") long id) {
     }
 
     @PUT
+    @AuthRequired
     @Path("/{id}/:active/{activeState}")
-    public Response setActiveState(@PathParam("id") long id, @PathParam("activeState") boolean active) {
+    public Response setActiveState(@Context ContainerRequestContext crc, @PathParam("id") long id, @PathParam("activeState") boolean active) {
         User authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (!authenticatedUser.isSuperuser()) {
                 return error(Status.FORBIDDEN, "must be superuser");
             }
@@ -147,11 +153,12 @@ public Response setActiveState(@PathParam("id") long id, @PathParam("activeState
     }
 
     @PUT
+    @AuthRequired
     @Path("/{id}/:sortOrder/{sortOrder}")
-    public Response setSortOrder(@PathParam("id") long id, @PathParam("sortOrder") long sortOrder) {
+    public Response setSortOrder(@Context ContainerRequestContext crc, @PathParam("id") long id, @PathParam("sortOrder") long sortOrder) {
         User authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (!authenticatedUser.isSuperuser()) {
                 return error(Status.FORBIDDEN, "must be superuser");
             }
@@ -178,11 +185,12 @@ public Response setSortOrder(@PathParam("id") long id, @PathParam("sortOrder") l
     }
 
     @DELETE
+    @AuthRequired
     @Path("/{id}")
-    public Response deleteLicenseById(@PathParam("id") long id) {
+    public Response deleteLicenseById(@Context ContainerRequestContext crc, @PathParam("id") long id) {
         User authenticatedUser;
         try {
-            authenticatedUser = findAuthenticatedUserOrDie();
+            authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (!authenticatedUser.isSuperuser()) {
                 return error(Status.FORBIDDEN, "must be superuser");
             }
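
The Licenses changes above are the first of many occurrences of the same refactoring in this patch: @AuthRequired moves authentication into a request filter, and the endpoint pulls the already-authenticated user out of the ContainerRequestContext instead of calling findAuthenticatedUserOrDie(). A rough sketch of the resulting shape; this is an illustrative fragment meant to live inside a resource class extending AbstractApiBean, with the path and messages invented for the example:

```java
// Illustrative fragment only. @AuthRequired triggers the auth filter; the
// helper then reads the authenticated user back out of the request context.
@POST
@AuthRequired
@Path("/example")
public Response exampleEndpoint(@Context ContainerRequestContext crc) {
    try {
        User authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
        if (!authenticatedUser.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "must be superuser");
        }
        return ok("superuser check passed");
    } catch (WrappedResponse e) {
        return e.getResponse();
    }
}
```
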
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Logout.java b/src/main/java/edu/harvard/iq/dataverse/api/Logout.java
new file mode 100644
index 00000000000..e8d8be04459
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Logout.java
@@ -0,0 +1,41 @@
+package edu.harvard.iq.dataverse.api;
+
+import edu.harvard.iq.dataverse.DataverseHeaderFragment;
+import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
+
+@Path("logout")
+public class Logout extends AbstractApiBean {
+
+    @Inject
+    DataverseSession session;
+
+    /**
+     * The only API authentication mechanism currently subject to logout is session cookie authentication, and it is only available when the corresponding feature flag is enabled:
+     *
+     * @see FeatureFlags#API_SESSION_AUTH
+     * <p>
+     * This endpoint replicates the logic from the JSF Log Out feature:
+     * @see DataverseHeaderFragment#logout()
+     * <p>
+     * TODO: This endpoint must change when a final API authentication mechanism is established for use cases / applications subject to Log Out
+     */
+    @POST
+    @Path("/")
+    public Response logout() {
+        if (!FeatureFlags.API_SESSION_AUTH.enabled()) {
+            return error(Response.Status.INTERNAL_SERVER_ERROR, "This endpoint is only available when the session authentication feature flag is enabled");
+        }
+        if (!session.getUser().isAuthenticated()) {
+            return error(Response.Status.BAD_REQUEST, "No valid session cookie was sent in the request");
+        }
+        session.setUser(null);
+        session.setStatusDismissed(false);
+        return ok("User logged out");
+    }
+}
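
For context, a rough client-side sketch of exercising the new endpoint; the base URL, the /api/logout path, the JSESSIONID cookie name, and the response shape are assumptions for illustration, and the call only succeeds when the session-auth feature flag is enabled:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Hypothetical client for the new logout endpoint (values are placeholders).
public class LogoutClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder(URI.create("http://localhost:8080/api/logout"))
                .header("Cookie", "JSESSIONID=<your-session-id>") // assumed session cookie name
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // On success, expect something like {"status":"OK","data":{"message":"User logged out"}}.
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```
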
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Mail.java b/src/main/java/edu/harvard/iq/dataverse/api/Mail.java
index 3b5050b480b..5fac2f30c89 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Mail.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Mail.java
@@ -2,10 +2,10 @@
 
 import edu.harvard.iq.dataverse.MailServiceBean;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
-import javax.ejb.EJB;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.core.Response;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
index 8f6ec6b1c7d..b2696757220 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/MakeDataCountApi.java
@@ -6,30 +6,31 @@
 import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitationsServiceBean;
 import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics;
 import edu.harvard.iq.dataverse.makedatacount.DatasetMetricsServiceBean;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
-import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStream;
 import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.Iterator;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.Status;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.Status;
 
 /**
  * Note that there are makeDataCount endpoints in Datasets.java as well.
@@ -82,26 +83,21 @@ public Response sendDataToHub() {
     @Path("{id}/addUsageMetricsFromSushiReport")
     public Response addUsageMetricsFromSushiReport(@PathParam("id") String id, @QueryParam("reportOnDisk") String reportOnDisk) {
 
-        JsonObject report;
-
-        try (FileReader reader = new FileReader(reportOnDisk)) {
-            report = Json.createReader(reader).readObject();
-            Dataset dataset;
-            try {
-                dataset = findDatasetOrDie(id);
-                List<DatasetMetrics> datasetMetrics = datasetMetricsService.parseSushiReport(report, dataset);
-                if (!datasetMetrics.isEmpty()) {
-                    for (DatasetMetrics dm : datasetMetrics) {
-                        datasetMetricsService.save(dm);
-                    }
+        try {
+            JsonObject report = JsonUtil.getJsonObjectFromFile(reportOnDisk);
+            Dataset dataset = findDatasetOrDie(id);
+            List<DatasetMetrics> datasetMetrics = datasetMetricsService.parseSushiReport(report, dataset);
+            if (!datasetMetrics.isEmpty()) {
+                for (DatasetMetrics dm : datasetMetrics) {
+                    datasetMetricsService.save(dm);
                 }
-            } catch (WrappedResponse ex) {
-                Logger.getLogger(MakeDataCountApi.class.getName()).log(Level.SEVERE, null, ex);
-                return error(Status.BAD_REQUEST, "Wrapped response: " + ex.getLocalizedMessage());
             }
+        } catch (WrappedResponse ex) {
+            logger.log(Level.SEVERE, null, ex);
+            return error(Status.BAD_REQUEST, "Wrapped response: " + ex.getLocalizedMessage());
 
         } catch (IOException ex) {
-            System.out.print(ex.getMessage());
+            logger.log(Level.WARNING, ex.getMessage());
             return error(Status.BAD_REQUEST, "IOException: " + ex.getLocalizedMessage());
         }
         String msg = "Dummy Data has been added to dataset " + id;
@@ -112,10 +108,8 @@ public Response addUsageMetricsFromSushiReport(@PathParam("id") String id, @Quer
     @Path("/addUsageMetricsFromSushiReport")
     public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @QueryParam("reportOnDisk") String reportOnDisk) {
 
-        JsonObject report;
-
-        try (FileReader reader = new FileReader(reportOnDisk)) {
-            report = Json.createReader(reader).readObject();
+        try {
+            JsonObject report = JsonUtil.getJsonObjectFromFile(reportOnDisk);
 
             List<DatasetMetrics> datasetMetrics = datasetMetricsService.parseSushiReport(report, null);
             if (!datasetMetrics.isEmpty()) {
@@ -125,7 +119,7 @@ public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @Q
             }
 
         } catch (IOException ex) {
-            System.out.print(ex.getMessage());
+            logger.log(Level.WARNING, ex.getMessage());
             return error(Status.BAD_REQUEST, "IOException: " + ex.getLocalizedMessage());
         }
         String msg = "Usage Metrics Data has been added to all datasets from file  " + reportOnDisk;
@@ -134,14 +128,18 @@ public Response addUsageMetricsFromSushiReportAll(@PathParam("id") String id, @Q
 
     @POST
     @Path("{id}/updateCitationsForDataset")
-    public Response updateCitationsForDataset(@PathParam("id") String id) throws MalformedURLException, IOException {
+    public Response updateCitationsForDataset(@PathParam("id") String id) throws IOException {
         try {
             Dataset dataset = findDatasetOrDie(id);
             String persistentId = dataset.getGlobalId().toString();
+            // TODO: What if this isn't a DOI?
             // DataCite wants "doi=", not "doi:".
             String authorityPlusIdentifier = persistentId.replaceFirst("doi:", "");
             // Request max page size and then loop to handle multiple pages
-            URL url = new URL(systemConfig.getDataCiteRestApiUrlString() + "/events?doi=" + authorityPlusIdentifier + "&source=crossref&page[size]=1000");
+            URL url = new URL(JvmSettings.DATACITE_REST_API_URL.lookup() +
+                              "/events?doi=" +
+                              authorityPlusIdentifier +
+                              "&source=crossref&page[size]=1000");
             logger.fine("Retrieving Citations from " + url.toString());
             boolean nextPage = true;
             JsonArrayBuilder dataBuilder = Json.createArrayBuilder();
@@ -153,7 +151,10 @@ public Response updateCitationsForDataset(@PathParam("id") String id) throws Mal
                     logger.warning("Failed to get citations from " + url.toString());
                     return error(Status.fromStatusCode(status), "Failed to get citations from " + url.toString());
                 }
-                JsonObject report = Json.createReader(connection.getInputStream()).readObject();
+                JsonObject report;
+                try (InputStream inStream = connection.getInputStream()) {
+                    report = JsonUtil.getJsonObject(inStream);
+                }
                 JsonObject links = report.getJsonObject("links");
                 JsonArray data = report.getJsonArray("data");
                 Iterator<JsonValue> iter = data.iterator();
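
The MakeDataCount changes above swap hand-rolled FileReader/Json.createReader code for JsonUtil helpers and wrap the HTTP response stream in try-with-resources. A self-contained sketch of the underlying pattern; the method names are illustrative, not the project's actual JsonUtil API:

```java
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;

import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;

// Sketch: parse a JSON object from a stream or file, always releasing the resource.
public class JsonReadSketch {

    static JsonObject readJsonObject(InputStream in) {
        try (JsonReader reader = Json.createReader(in)) { // closing the reader closes the stream
            return reader.readObject();
        }
    }

    static JsonObject readJsonObjectFromFile(String pathOnDisk) throws IOException {
        try (InputStream in = Files.newInputStream(Path.of(pathOnDisk))) {
            return readJsonObject(in);
        }
    }
}
```
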
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
index 1ca97f2ec69..a38840ba50d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Meta.java
@@ -15,19 +15,19 @@
 import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
 
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.ByteArrayOutputStream;
 
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.ServiceUnavailableException;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.ServiceUnavailableException;
 
 /*
     Custom API exceptions [NOT YET IMPLEMENTED]
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
index b0d82b69d1b..bd937878286 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metadata.java
@@ -8,22 +8,17 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 
-import java.io.IOException;
-import java.util.concurrent.Future;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.*;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.*;
 
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISet;
-import org.apache.solr.client.solrj.SolrServerException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
index b3e1dad13af..448fb48e389 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/MetadataBlocks.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.MetadataBlock;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Response;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief;
-import javax.ws.rs.PathParam;
+import jakarta.ws.rs.PathParam;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java
index c59d86a77da..7bb2570334b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Metrics.java
@@ -12,22 +12,22 @@
 import java.util.List;
 import java.util.logging.Logger;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.ws.rs.GET;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Request;
-import javax.ws.rs.core.Response;
-
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import javax.ws.rs.core.UriInfo;
-import javax.ws.rs.core.Variant;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Request;
+import jakarta.ws.rs.core.Response;
+
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.ws.rs.core.UriInfo;
+import jakarta.ws.rs.core.Variant;
 
 /**
  * API endpoints for various metrics.
@@ -412,7 +412,7 @@ public Response getFilesByTypeTimeSeries(@Context Request req, @Context UriInfo
         } catch (IllegalArgumentException ia) {
             return error(BAD_REQUEST, ia.getLocalizedMessage());
         }
-        String metricName = "filesByType";
+        String metricName = "filesByTypeMonthly";
 
         JsonArray jsonArray = MetricsUtil.stringToJsonArray(metricsSvc.returnUnexpiredCacheAllTime(metricName, null, d));
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
index c477788cae6..37c894d3071 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Notifications.java
@@ -3,6 +3,7 @@
 import edu.harvard.iq.dataverse.MailServiceBean;
 import edu.harvard.iq.dataverse.UserNotification;
 import edu.harvard.iq.dataverse.UserNotification.Type;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.workflows.WorkflowUtil;
@@ -10,17 +11,19 @@
 import java.util.Optional;
 import java.util.Set;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 import edu.harvard.iq.dataverse.util.MailUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
@@ -34,17 +37,10 @@ public class Notifications extends AbstractApiBean {
     MailServiceBean mailService;
     
     @GET
+    @AuthRequired
     @Path("/all")
-    public Response getAllNotificationsForUser() {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response getAllNotificationsForUser(@Context ContainerRequestContext crc) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -88,17 +84,10 @@ private JsonArrayBuilder getReasonsForReturn(UserNotification notification) {
     }
 
     @DELETE
+    @AuthRequired
     @Path("/{id}")
-    public Response deleteNotificationForUser(@PathParam("id") long id) {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response deleteNotificationForUser(@Context ContainerRequestContext crc, @PathParam("id") long id) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -117,17 +106,10 @@ public Response deleteNotificationForUser(@PathParam("id") long id) {
     }
 
     @GET
+    @AuthRequired
     @Path("/mutedEmails")
-    public Response getMutedEmailsForUser() {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response getMutedEmailsForUser(@Context ContainerRequestContext crc) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -143,17 +125,10 @@ public Response getMutedEmailsForUser() {
     }
 
     @PUT
+    @AuthRequired
     @Path("/mutedEmails/{typeName}")
-    public Response muteEmailsForUser(@PathParam("typeName") String typeName) {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response muteEmailsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -174,17 +149,10 @@ public Response muteEmailsForUser(@PathParam("typeName") String typeName) {
     }
 
     @DELETE
+    @AuthRequired
     @Path("/mutedEmails/{typeName}")
-    public Response unmuteEmailsForUser(@PathParam("typeName") String typeName) {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response unmuteEmailsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -205,17 +173,10 @@ public Response unmuteEmailsForUser(@PathParam("typeName") String typeName) {
     }
 
     @GET
+    @AuthRequired
     @Path("/mutedNotifications")
-    public Response getMutedNotificationsForUser() {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response getMutedNotificationsForUser(@Context ContainerRequestContext crc) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -231,17 +192,10 @@ public Response getMutedNotificationsForUser() {
     }
 
     @PUT
+    @AuthRequired
     @Path("/mutedNotifications/{typeName}")
-    public Response muteNotificationsForUser(@PathParam("typeName") String typeName) {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response muteNotificationsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
@@ -262,17 +216,10 @@ public Response muteNotificationsForUser(@PathParam("typeName") String typeName)
     }
 
     @DELETE
+    @AuthRequired
     @Path("/mutedNotifications/{typeName}")
-    public Response unmuteNotificationsForUser(@PathParam("typeName") String typeName) {
-        User user;
-        try {
-            user = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.UNAUTHORIZED, "You must supply an API token.");
-        }
-        if (user == null) {
-            return error(Response.Status.BAD_REQUEST, "A user could not be found based on the API token.");
-        }
+    public Response unmuteNotificationsForUser(@Context ContainerRequestContext crc, @PathParam("typeName") String typeName) {
+        User user = getRequestUser(crc);
         if (!(user instanceof AuthenticatedUser)) {
             // It's unlikely we'll reach this error. A Guest doesn't have an API token and would have been blocked above.
             return error(Response.Status.BAD_REQUEST, "Only an AuthenticatedUser can have notifications.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Pids.java b/src/main/java/edu/harvard/iq/dataverse/api/Pids.java
index 5a2acf3209f..534e42fd505 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Pids.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Pids.java
@@ -1,28 +1,33 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.engine.command.impl.DeletePidCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.ReservePidCommand;
 import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.util.Arrays;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 /**
  * PIDs are Persistent IDentifiers such as DOIs or Handles.
@@ -36,21 +41,23 @@
 public class Pids extends AbstractApiBean {
 
     @GET
+    @AuthRequired
     @Produces(MediaType.APPLICATION_JSON)
-    public Response getPid(@QueryParam("persistentId") String persistentId) {
-        try {
-            User user = findUserOrDie();
-            if (!user.isSuperuser()) {
-                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("admin.api.auth.mustBeSuperUser"));
-            }
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("api.errors.invalidApiToken"));
+    public Response getPid(@Context ContainerRequestContext crc, @QueryParam("persistentId") String persistentId) {
+        User user = getRequestUser(crc);
+        if (!user.isSuperuser()) {
+            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("admin.api.auth.mustBeSuperUser"));
         }
-        String baseUrl = systemConfig.getDataCiteRestApiUrlString();
-        String username = System.getProperty("doi.username");
-        String password = System.getProperty("doi.password");
+
+        // FIXME: Even before changing to MPCONFIG retrieval, this was pinned to be DataCite specific!
+        //        Should this be extended to EZID and other PID systems like Handle?
+        String baseUrl = JvmSettings.DATACITE_REST_API_URL.lookup();
+        String username = JvmSettings.DATACITE_USERNAME.lookup();
+        String password = JvmSettings.DATACITE_PASSWORD.lookup();
+        
         try {
-            JsonObjectBuilder result = PidUtil.queryDoi(persistentId, baseUrl, username, password);
+            GlobalId globalId = PidUtil.parseAsGlobalID(persistentId);
+            JsonObjectBuilder result = PidUtil.queryDoi(globalId, baseUrl, username, password);
             return ok(result);
         } catch (NotFoundException ex) {
             return error(ex.getResponse().getStatusInfo().toEnum(), ex.getLocalizedMessage());
@@ -60,16 +67,13 @@ public Response getPid(@QueryParam("persistentId") String persistentId) {
     }
 
     @GET
+    @AuthRequired
     @Produces(MediaType.APPLICATION_JSON)
     @Path("unreserved")
-    public Response getUnreserved(@QueryParam("persistentId") String persistentId) {
-        try {
-            User user = findUserOrDie();
-            if (!user.isSuperuser()) {
-                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("admin.api.auth.mustBeSuperUser"));
-            }
-        } catch (WrappedResponse ex) {
-            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("api.errors.invalidApiToken"));
+    public Response getUnreserved(@Context ContainerRequestContext crc, @QueryParam("persistentId") String persistentId) {
+        User user = getRequestUser(crc);
+        if (!user.isSuperuser()) {
+            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("admin.api.auth.mustBeSuperUser"));
         }
 
         JsonArrayBuilder unreserved = Json.createArrayBuilder();
@@ -93,12 +97,13 @@ public Response getUnreserved(@QueryParam("persistentId") String persistentId) {
     }
 
     @POST
+    @AuthRequired
     @Produces(MediaType.APPLICATION_JSON)
     @Path("{id}/reserve")
-    public Response reservePid(@PathParam("id") String idSupplied) {
+    public Response reservePid(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
             Dataset dataset = findDatasetOrDie(idSupplied);
-            execCommand(new ReservePidCommand(createDataverseRequest(findUserOrDie()), dataset));
+            execCommand(new ReservePidCommand(createDataverseRequest(getRequestUser(crc)), dataset));
             return ok(BundleUtil.getStringFromBundle("pids.api.reservePid.success", Arrays.asList(dataset.getGlobalId().asString())));
         } catch (WrappedResponse ex) {
             return ex.getResponse();
@@ -106,9 +111,10 @@ public Response reservePid(@PathParam("id") String idSupplied) {
     }
 
     @DELETE
+    @AuthRequired
     @Produces(MediaType.APPLICATION_JSON)
     @Path("{id}/delete")
-    public Response deletePid(@PathParam("id") String idSupplied) {
+    public Response deletePid(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied) {
         try {
             Dataset dataset = findDatasetOrDie(idSupplied);
             //Restrict to never-published datasets (that should have draft/nonpublic pids). The underlying code will invalidate
@@ -117,7 +123,7 @@ public Response deletePid(@PathParam("id") String idSupplied) {
             if(dataset.isReleased()) {
             	return badRequest("Not allowed for Datasets that have been published.");
             }
-            execCommand(new DeletePidCommand(createDataverseRequest(findUserOrDie()), dataset));
+            execCommand(new DeletePidCommand(createDataverseRequest(getRequestUser(crc)), dataset));
             return ok(BundleUtil.getStringFromBundle("pids.api.deletePid.success", Arrays.asList(dataset.getGlobalId().asString())));
         } catch (WrappedResponse ex) {
             return ex.getResponse();
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java
index bb40c53c1ca..7f81ca20988 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java
@@ -1,6 +1,7 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.provenance.ProvEntityFileData;
 import edu.harvard.iq.dataverse.provenance.ProvInvestigator;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -11,24 +12,27 @@
 import edu.harvard.iq.dataverse.engine.command.impl.PersistProvJsonCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import java.io.StringReader;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.util.HashMap;
 import java.util.logging.Logger;
-import javax.inject.Inject;
-import javax.json.Json;
-import javax.json.JsonException;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
+import jakarta.json.JsonException;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
 
 @Path("files")
 public class Prov extends AbstractApiBean {
@@ -39,9 +43,10 @@ public class Prov extends AbstractApiBean {
     
     /** Provenance JSON methods **/
     @POST
+    @AuthRequired
     @Path("{id}/prov-json")
     @Consumes("application/json")
-    public Response addProvJson(String body, @PathParam("id") String idSupplied, @QueryParam("entityName") String entityName) {
+    public Response addProvJson(@Context ContainerRequestContext crc, String body, @PathParam("id") String idSupplied, @QueryParam("entityName") String entityName) {
         if(!systemConfig.isProvCollectionEnabled()) {
             return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled"));
         }
@@ -68,7 +73,7 @@ public Response addProvJson(String body, @PathParam("id") String idSupplied, @Qu
                 return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.entityMismatch"));
             }
             
-            execCommand(new PersistProvJsonCommand(createDataverseRequest(findUserOrDie()), dataFile , body, entityName, true));
+            execCommand(new PersistProvJsonCommand(createDataverseRequest(getRequestUser(crc)), dataFile , body, entityName, true));
             JsonObjectBuilder jsonResponse = Json.createObjectBuilder();
             jsonResponse.add("message", BundleUtil.getStringFromBundle("api.prov.provJsonSaved") + " " + dataFile.getDisplayName());
             return ok(jsonResponse);
@@ -78,8 +83,9 @@ public Response addProvJson(String body, @PathParam("id") String idSupplied, @Qu
     }
     
     @DELETE
+    @AuthRequired
     @Path("{id}/prov-json")
-    public Response deleteProvJson(String body, @PathParam("id") String idSupplied) {
+    public Response deleteProvJson(@Context ContainerRequestContext crc, String body, @PathParam("id") String idSupplied) {
         if(!systemConfig.isProvCollectionEnabled()) {
             return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled"));
         }
@@ -88,7 +94,7 @@ public Response deleteProvJson(String body, @PathParam("id") String idSupplied)
             if(dataFile.isReleased()){
                 return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.jsonDeleteNotAllowed"));
             }
-            execCommand(new DeleteProvJsonCommand(createDataverseRequest(findUserOrDie()), dataFile, true));
+            execCommand(new DeleteProvJsonCommand(createDataverseRequest(getRequestUser(crc)), dataFile, true));
             return ok(BundleUtil.getStringFromBundle("api.prov.provJsonDeleted"));
         } catch (WrappedResponse ex) {
             return ex.getResponse();
@@ -97,17 +103,17 @@ public Response deleteProvJson(String body, @PathParam("id") String idSupplied)
 
     /** Provenance FreeForm methods **/
     @POST
+    @AuthRequired
     @Path("{id}/prov-freeform")
     @Consumes("application/json")
-    public Response addProvFreeForm(String body, @PathParam("id") String idSupplied) {
+    public Response addProvFreeForm(@Context ContainerRequestContext crc, String body, @PathParam("id") String idSupplied) {
         if(!systemConfig.isProvCollectionEnabled()) {
             return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled"));
         }
-        StringReader rdr = new StringReader(body);
         JsonObject jsonObj = null;
         
         try {
-            jsonObj = Json.createReader(rdr).readObject();
+            jsonObj = JsonUtil.getJsonObject(body);
         } catch (JsonException ex) {
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformInvalidJson"));
         }
@@ -118,7 +124,7 @@ public Response addProvFreeForm(String body, @PathParam("id") String idSupplied)
             return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformMissingJsonKey"));
         }
         try {
-            DataverseRequest dr= createDataverseRequest(findUserOrDie());
+            DataverseRequest dr= createDataverseRequest(getRequestUser(crc));
             DataFile dataFile = findDataFileOrDie(idSupplied);
             if (dataFile == null) {
                 return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.badDataFileId"));
@@ -136,13 +142,14 @@ public Response addProvFreeForm(String body, @PathParam("id") String idSupplied)
     }
     
     @GET
+    @AuthRequired
     @Path("{id}/prov-freeform")
-    public Response getProvFreeForm(String body, @PathParam("id") String idSupplied) {
+    public Response getProvFreeForm(@Context ContainerRequestContext crc, String body, @PathParam("id") String idSupplied) {
         if(!systemConfig.isProvCollectionEnabled()) {
             return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled"));
         }
         try {
-            String freeFormText = execCommand(new GetProvFreeFormCommand(createDataverseRequest(findUserOrDie()), findDataFileOrDie(idSupplied)));
+            String freeFormText = execCommand(new GetProvFreeFormCommand(createDataverseRequest(getRequestUser(crc)), findDataFileOrDie(idSupplied)));
             if(null == freeFormText) {
                 return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformNoText"));
             }
@@ -155,13 +162,14 @@ public Response getProvFreeForm(String body, @PathParam("id") String idSupplied)
     }
     
     @GET
+    @AuthRequired
     @Path("{id}/prov-json")
-    public Response getProvJson(String body, @PathParam("id") String idSupplied) {
+    public Response getProvJson(@Context ContainerRequestContext crc, String body, @PathParam("id") String idSupplied) {
         if(!systemConfig.isProvCollectionEnabled()) {
             return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled"));
         }
         try {
-            JsonObject jsonText = execCommand(new GetProvJsonCommand(createDataverseRequest(findUserOrDie()), findDataFileOrDie(idSupplied)));
+            JsonObject jsonText = execCommand(new GetProvJsonCommand(createDataverseRequest(getRequestUser(crc)), findDataFileOrDie(idSupplied)));
             if(null == jsonText) {
                 return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.jsonNoContent"));
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java
index 72add184a24..8812f95dea1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Roles.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Roles.java
@@ -1,25 +1,26 @@
 package edu.harvard.iq.dataverse.api;
 
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.api.dto.RoleDTO;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*;
 import edu.harvard.iq.dataverse.engine.command.impl.CreateRoleCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.DeleteRoleCommand;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.util.Arrays;
 import java.util.List;
-import javax.ejb.Stateless;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 
 /**
  * Util API for managing roles. Might not make it to the production version.
@@ -30,10 +31,11 @@
 public class Roles extends AbstractApiBean {
 	
 	@GET
+    @AuthRequired
 	@Path("{id}")
-	public Response viewRole( @PathParam("id") String id) {
+	public Response viewRole(@Context ContainerRequestContext crc, @PathParam("id") String id) {
         return response( ()-> {
-            final User user = findUserOrDie(); 
+            final User user = getRequestUser(crc);
             final DataverseRole role = findRoleOrDie(id);
             return ( permissionSvc.userOn(user, role.getOwner()).has(Permission.ManageDataversePermissions) ) 
                     ? ok( json(role) ) : permissionError("Permission required to view roles.");
@@ -41,8 +43,9 @@ public Response viewRole( @PathParam("id") String id) {
 	}
 	
     @DELETE
+    @AuthRequired
     @Path("{id}")
-    public Response deleteRole(@PathParam("id") String id) {
+    public Response deleteRole(@Context ContainerRequestContext crc, @PathParam("id") String id) {
         return response(req -> {
             DataverseRole role = findRoleOrDie(id);
             List<String> args = Arrays.asList(role.getName());
@@ -51,15 +54,17 @@ public Response deleteRole(@PathParam("id") String id) {
             }
             execCommand(new DeleteRoleCommand(req, role));
             return ok("role " + role.getName() + " deleted.");
-        });
+        }, getRequestUser(crc));
     }
 	
 	@POST
-	public Response createNewRole( RoleDTO roleDto,
-                                   @QueryParam("dvo") String dvoIdtf ) {
+    @AuthRequired
+	public Response createNewRole(@Context ContainerRequestContext crc,
+                                  RoleDTO roleDto,
+                                  @QueryParam("dvo") String dvoIdtf) {
         return response( req -> ok(json(execCommand(
                                   new CreateRoleCommand(roleDto.asRole(),
-                                                        req,findDataverseOrDie(dvoIdtf))))));
+                                                        req,findDataverseOrDie(dvoIdtf))))), getRequestUser(crc));
 	}
     
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
index 7ead0d23711..5d0365d022e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/SavedSearches.java
@@ -11,23 +11,23 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
 
 @Path("admin/savedsearches")
 public class SavedSearches extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Search.java b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
index cef509b1ec5..71e2865ca4d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Search.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Search.java
@@ -1,6 +1,7 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.search.SearchFields;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
@@ -23,18 +24,18 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -56,7 +57,9 @@ public class Search extends AbstractApiBean {
     SolrIndexServiceBean SolrIndexService;
 
     @GET
+    @AuthRequired
     public Response search(
+            @Context ContainerRequestContext crc,
             @QueryParam("q") String query,
             @QueryParam("type") final List<String> types,
             @QueryParam("subtree") final List<String> subtrees,
@@ -79,7 +82,7 @@ public Response search(
 
         User user;
         try {
-            user = getUser();
+            user = getUser(crc);
         } catch (WrappedResponse ex) {
             return ex.getResponse();
         }
@@ -154,7 +157,9 @@ public Response search(
                         numResultsPerPage,
                         true, //SEK get query entities always for search API additional Dataset Information 6300  12/6/2019
                         geoPoint,
-                        geoRadius
+                        geoRadius,
+                        showFacets, // facets are expensive, no need to ask for them if not requested
+                        showRelevance // no need for highlights unless requested either
                 );
             } catch (SearchException ex) {
                 Throwable cause = ex;
@@ -227,10 +232,10 @@ public Response search(
         }
     }
 
-    private User getUser() throws WrappedResponse {
+    private User getUser(ContainerRequestContext crc) throws WrappedResponse {
         User userToExecuteSearchAs = GuestUser.get();
         try {
-            AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
+            AuthenticatedUser authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             if (authenticatedUser != null) {
                 userToExecuteSearchAs = authenticatedUser;
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java b/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java
index 787c3380e5b..37d6a2aa3fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/SiteMap.java
@@ -2,13 +2,13 @@
 
 import edu.harvard.iq.dataverse.sitemap.SiteMapServiceBean;
 import edu.harvard.iq.dataverse.sitemap.SiteMapUtil;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 @Stateless
 @Path("admin/sitemap")
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java b/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java
index 54adeecd9f9..2915328428e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/StorageSites.java
@@ -3,16 +3,16 @@
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import edu.harvard.iq.dataverse.locality.StorageSiteUtil;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 @Path("admin/storageSites")
 public class StorageSites extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
index 42caa95b9f5..87be1f14e05 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
@@ -2,20 +2,19 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool;
 import edu.harvard.iq.dataverse.externaltools.ExternalToolHandler;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.Response;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.core.Response;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 @Path("admin/test")
 public class TestApi extends AbstractApiBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java
index 15c3b34f6af..05ba150df8e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/TestIngest.java
@@ -18,22 +18,22 @@
 import edu.harvard.iq.dataverse.util.StringUtil;
 import java.io.BufferedInputStream;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.QueryParam;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.QueryParam;
 
 
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
index 7568c7caff6..791fc7aa774 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
@@ -5,7 +5,7 @@
  */
 package edu.harvard.iq.dataverse.api;
 
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.error;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
@@ -13,31 +13,30 @@
 import edu.harvard.iq.dataverse.engine.command.impl.GetUserTracesCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.MergeInAccountCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand;
-import edu.harvard.iq.dataverse.metrics.MetricsUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.json.JsonArray;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Request;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Variant;
+import jakarta.ejb.Stateless;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Request;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Variant;
 
 /**
  *
@@ -50,11 +49,12 @@ public class Users extends AbstractApiBean {
     private static final Logger logger = Logger.getLogger(Users.class.getName());
     
     @POST
+    @AuthRequired
     @Path("{consumedIdentifier}/mergeIntoUser/{baseIdentifier}")
-    public Response mergeInAuthenticatedUser(@PathParam("consumedIdentifier") String consumedIdentifier, @PathParam("baseIdentifier") String baseIdentifier) {
+    public Response mergeInAuthenticatedUser(@Context ContainerRequestContext crc, @PathParam("consumedIdentifier") String consumedIdentifier, @PathParam("baseIdentifier") String baseIdentifier) {
         User u;
         try {
-            u = findUserOrDie();
+            u = getRequestUser(crc);
             if(!u.isSuperuser()) {
                 throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can merge users"));
             }
@@ -88,11 +88,12 @@ public Response mergeInAuthenticatedUser(@PathParam("consumedIdentifier") String
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/changeIdentifier/{newIdentifier}")
-    public Response changeAuthenticatedUserIdentifier(@PathParam("identifier") String oldIdentifier, @PathParam("newIdentifier")  String newIdentifier) {
+    public Response changeAuthenticatedUserIdentifier(@Context ContainerRequestContext crc, @PathParam("identifier") String oldIdentifier, @PathParam("newIdentifier")  String newIdentifier) {
         User u;
         try {
-            u = findUserOrDie();
+            u = getRequestUser(crc);
             if(!u.isSuperuser()) {
                 throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can change userIdentifiers"));
             }
@@ -121,16 +122,11 @@ public Response changeAuthenticatedUserIdentifier(@PathParam("identifier") Strin
     }
     
     @Path("token")
+    @AuthRequired
     @DELETE
-    public Response deleteToken() {
-        User u;
-
-        try {
-            u = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return ex.getResponse();
-        }
-        AuthenticatedUser au;        
+    public Response deleteToken(@Context ContainerRequestContext crc) {
+        User u = getRequestUser(crc);
+        AuthenticatedUser au;
        
         try{
              au = (AuthenticatedUser) u; 
@@ -145,16 +141,9 @@ public Response deleteToken() {
     }
     
     @Path("token")
+    @AuthRequired
     @GET
     public Response getTokenExpirationDate() {
-        User u;
-        
-        try {
-            u = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return ex.getResponse();
-        }      
-        
         ApiToken token = authSvc.findApiToken(getRequestApiKey());
         
         if (token == null) {
@@ -166,16 +155,11 @@ public Response getTokenExpirationDate() {
     }
     
     @Path("token/recreate")
+    @AuthRequired
     @POST
-    public Response recreateToken() {
-        User u;
+    public Response recreateToken(@Context ContainerRequestContext crc) {
+        User u = getRequestUser(crc);
 
-        try {
-            u = findUserOrDie();
-        } catch (WrappedResponse ex) {
-            return ex.getResponse();
-        }
-        
         AuthenticatedUser au;        
         try{
              au = (AuthenticatedUser) u; 
@@ -195,8 +179,9 @@ public Response recreateToken() {
     }
     
     @GET
+    @AuthRequired
     @Path(":me")
-    public Response getAuthenticatedUserByToken() {
+    public Response getAuthenticatedUserByToken(@Context ContainerRequestContext crc) {
 
         String tokenFromRequestAPI = getRequestApiKey();
 
@@ -205,7 +190,7 @@ public Response getAuthenticatedUserByToken() {
         // this is a good idea
         if (authenticatedUser == null) {
             try {
-                authenticatedUser = findAuthenticatedUserOrDie();
+                authenticatedUser = getRequestAuthenticatedUserOrDie(crc);
             } catch (WrappedResponse ex) {
                 Logger.getLogger(Users.class.getName()).log(Level.SEVERE, null, ex);
                 return error(Response.Status.BAD_REQUEST, "User with token " + tokenFromRequestAPI + " not found.");
@@ -215,14 +200,15 @@ public Response getAuthenticatedUserByToken() {
     }
 
     @POST
+    @AuthRequired
     @Path("{identifier}/removeRoles")
-    public Response removeUserRoles(@PathParam("identifier") String identifier) {
+    public Response removeUserRoles(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) {
         try {
             AuthenticatedUser userToModify = authSvc.getAuthenticatedUser(identifier);
             if (userToModify == null) {
                 return error(Response.Status.BAD_REQUEST, "Cannot find user based on " + identifier + ".");
             }
-            execCommand(new RevokeAllRolesCommand(userToModify, createDataverseRequest(findUserOrDie())));
+            execCommand(new RevokeAllRolesCommand(userToModify, createDataverseRequest(getRequestUser(crc))));
             return ok("Roles removed for user " + identifier + ".");
         } catch (WrappedResponse wr) {
             return wr.getResponse();
@@ -230,11 +216,12 @@ public Response removeUserRoles(@PathParam("identifier") String identifier) {
     }
 
     @GET
+    @AuthRequired
     @Path("{identifier}/traces")
-    public Response getTraces(@PathParam("identifier") String identifier) {
+    public Response getTraces(@Context ContainerRequestContext crc, @PathParam("identifier") String identifier) {
         try {
             AuthenticatedUser userToQuery = authSvc.getAuthenticatedUser(identifier);
-            JsonObjectBuilder jsonObj = execCommand(new GetUserTracesCommand(createDataverseRequest(findUserOrDie()), userToQuery, null));
+            JsonObjectBuilder jsonObj = execCommand(new GetUserTracesCommand(createDataverseRequest(getRequestUser(crc)), userToQuery, null));
             return ok(jsonObj);
         } catch (WrappedResponse ex) {
             return ex.getResponse();
@@ -244,15 +231,16 @@ public Response getTraces(@PathParam("identifier") String identifier) {
     private List<String> elements = Arrays.asList("roleAssignments","dataverseCreator", "dataversePublisher","datasetCreator", "datasetPublisher","dataFileCreator","dataFilePublisher","datasetVersionUsers","explicitGroups","guestbookEntries", "savedSearches");
     
     @GET
+    @AuthRequired
     @Path("{identifier}/traces/{element}")
     @Produces("text/csv, application/json")
-    public Response getTraces(@Context Request req, @PathParam("identifier") String identifier, @PathParam("element") String element) {
+    public Response getTraces(@Context ContainerRequestContext crc, @Context Request req, @PathParam("identifier") String identifier, @PathParam("element") String element) {
         try {
             AuthenticatedUser userToQuery = authSvc.getAuthenticatedUser(identifier);
             if(!elements.contains(element)) {
                 throw new BadRequestException("Not a valid element");
             }
-            JsonObjectBuilder jsonObj = execCommand(new GetUserTracesCommand(createDataverseRequest(findUserOrDie()), userToQuery, element));
+            JsonObjectBuilder jsonObj = execCommand(new GetUserTracesCommand(createDataverseRequest(getRequestUser(crc)), userToQuery, element));
             
             List<Variant> vars = Variant
                     .mediaTypes(MediaType.valueOf(FileUtil.MIME_TYPE_CSV), MediaType.APPLICATION_JSON_TYPE)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Util.java b/src/main/java/edu/harvard/iq/dataverse/api/Util.java
index 82adedc709f..25855769a38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Util.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Util.java
@@ -7,9 +7,9 @@
 import java.util.TimeZone;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonReader;
 
 public class Util {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java
index 4269a0215bf..4eadcedf71a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Workflows.java
@@ -8,11 +8,11 @@
 import java.util.Arrays;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 /**
  * API Endpoint for external systems to report the results of workflow step
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
index 4babe6875e2..8d5024c1c14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
@@ -11,18 +11,18 @@
 import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
 import java.util.Arrays;
 import java.util.Optional;
-import javax.ejb.EJB;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.PUT;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.Response;
 
 /**
  * API Endpoint for managing workflows.
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
new file mode 100644
index 00000000000..0dd8a28baca
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanism.java
@@ -0,0 +1,73 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import java.util.logging.Logger;
+
+/**
+ * @author Guillermo Portas
+ * Authentication mechanism that attempts to authenticate a user from an API Key provided in an API request.
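+ * The key can be supplied either in the {@value #DATAVERSE_API_KEY_REQUEST_HEADER_NAME} request header
+ * or in the {@value #DATAVERSE_API_KEY_REQUEST_PARAM_NAME} query parameter; the header takes precedence.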
+ */
+public class ApiKeyAuthMechanism implements AuthMechanism {
+
+    public static final String DATAVERSE_API_KEY_REQUEST_HEADER_NAME = "X-Dataverse-key";
+    public static final String DATAVERSE_API_KEY_REQUEST_PARAM_NAME = "key";
+    public static final String RESPONSE_MESSAGE_BAD_API_KEY = "Bad API key";
+    public static final String ACCESS_DATAFILE_PATH_PREFIX = "/access/datafile/";
+
+    @Inject
+    protected PrivateUrlServiceBean privateUrlSvc;
+
+    @Inject
+    protected AuthenticationServiceBean authSvc;
+
+    @Inject
+    protected UserServiceBean userSvc;
+
+    private static final Logger logger = Logger.getLogger(ApiKeyAuthMechanism.class.getName());
+
+    @Override
+    public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
+        String apiKey = getRequestApiKey(containerRequestContext);
+        if (apiKey == null) {
+            return null;
+        }
+        PrivateUrlUser privateUrlUser = privateUrlSvc.getPrivateUrlUserFromToken(apiKey);
+        if (privateUrlUser != null) {
+            checkAnonymizedAccessToRequestPath(containerRequestContext.getUriInfo().getPath(), privateUrlUser);
+            return privateUrlUser;
+        }
+        AuthenticatedUser authUser = authSvc.lookupUser(apiKey);
+        if (authUser != null) {
+            authUser = userSvc.updateLastApiUseTime(authUser);
+            return authUser;
+        }
+        throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY);
+    }
+
+    private String getRequestApiKey(ContainerRequestContext containerRequestContext) {
+        String headerParamApiKey = containerRequestContext.getHeaderString(DATAVERSE_API_KEY_REQUEST_HEADER_NAME);
+        String queryParamApiKey = containerRequestContext.getUriInfo().getQueryParameters().getFirst(DATAVERSE_API_KEY_REQUEST_PARAM_NAME);
+
+        return headerParamApiKey != null ? headerParamApiKey : queryParamApiKey;
+    }
+
+    private void checkAnonymizedAccessToRequestPath(String requestPath, PrivateUrlUser privateUrlUser) throws WrappedAuthErrorResponse {
+        if (!privateUrlUser.hasAnonymizedAccess()) {
+            return;
+        }
+        // For privateUrlUsers restricted to anonymized access, all API calls are off-limits except for those used in the UI
+        // to download the file or image thumbnails.
+        if (!(requestPath.startsWith(ACCESS_DATAFILE_PATH_PREFIX) && !requestPath.substring(ACCESS_DATAFILE_PATH_PREFIX.length()).contains("/"))) {
+            logger.info("Anonymized access request for " + requestPath);
+            throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_API_KEY);
+        }
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java
new file mode 100644
index 00000000000..34a72d718f0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthFilter.java
@@ -0,0 +1,35 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.api.ApiConstants;
+import edu.harvard.iq.dataverse.authorization.users.User;
+
+import jakarta.annotation.Priority;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.Priorities;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.container.ContainerRequestFilter;
+import jakarta.ws.rs.ext.Provider;
+import java.io.IOException;
+
+/**
+ * @author Guillermo Portas
+ * Dedicated filter to authenticate the user requesting an API endpoint that requires user authentication.
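+ * On success, the resolved user is stored in the request context under {@link ApiConstants#CONTAINER_REQUEST_CONTEXT_USER}
+ * so that resource methods can retrieve it; on failure, the request is aborted with the wrapped error response.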
+ */
+@AuthRequired
+@Provider
+@Priority(Priorities.AUTHENTICATION)
+public class AuthFilter implements ContainerRequestFilter {
+
+    @Inject
+    private CompoundAuthMechanism compoundAuthMechanism;
+
+    @Override
+    public void filter(ContainerRequestContext containerRequestContext) throws IOException {
+        try {
+            User user = compoundAuthMechanism.findUserFromRequest(containerRequestContext);
+            containerRequestContext.setProperty(ApiConstants.CONTAINER_REQUEST_CONTEXT_USER, user);
+        } catch (WrappedAuthErrorResponse e) {
+            containerRequestContext.abortWith(e.getResponse());
+        }
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java
new file mode 100644
index 00000000000..bd34acbf702
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthMechanism.java
@@ -0,0 +1,24 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.authorization.users.User;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+/**
+ * @author Guillermo Portas
+ * This interface defines the common behavior for any kind of Dataverse API authentication mechanism.
+ * Any implementation must correspond to a particular Dataverse API authentication credential type.
+ */
+interface AuthMechanism {
+
+    /**
+     * Returns the user associated with a particular authentication credential provided in a request.
+     * If the credential is not provided, it is expected to return a null user.
+     * If the credential is provided but invalid, a WrappedAuthErrorResponse exception is thrown.
+     *
+     * @param containerRequestContext a ContainerRequestContext implementation.
+     * @return a user that can be null.
+     * @throws edu.harvard.iq.dataverse.api.auth.WrappedAuthErrorResponse if a credential is provided but is invalid.
+     */
+    User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse;
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java
new file mode 100644
index 00000000000..bf0d785eeb3
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/AuthRequired.java
@@ -0,0 +1,19 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import jakarta.ws.rs.NameBinding;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+/**
+ * @author Guillermo Portas
+ * Annotation intended to be placed on any API method that requires user authentication.
+ * Marks the API methods whose related requests should be filtered by {@link edu.harvard.iq.dataverse.api.auth.AuthFilter}.
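+ * Annotated methods typically declare a {@code @Context ContainerRequestContext} parameter,
+ * from which the user resolved by the filter can be retrieved.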
+ */
+@NameBinding
+@Retention(RUNTIME)
+@Target({ElementType.TYPE, ElementType.METHOD})
+public @interface AuthRequired {
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
new file mode 100644
index 00000000000..31f524af3f0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanism.java
@@ -0,0 +1,124 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import com.nimbusds.oauth2.sdk.ParseException;
+import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
+import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.HttpHeaders;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
+public class BearerTokenAuthMechanism implements AuthMechanism {
+    private static final String BEARER_AUTH_SCHEME = "Bearer";
+    private static final Logger logger = Logger.getLogger(BearerTokenAuthMechanism.class.getCanonicalName());
+    
+    public static final String UNAUTHORIZED_BEARER_TOKEN = "Unauthorized bearer token";
+    public static final String INVALID_BEARER_TOKEN = "Could not parse bearer token";
+    public static final String BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED = "Bearer token detected, no OIDC provider configured";
+
+    @Inject
+    protected AuthenticationServiceBean authSvc;
+    @Inject
+    protected UserServiceBean userSvc;
+    
+    @Override
+    public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
+        if (FeatureFlags.API_BEARER_AUTH.enabled()) {
+            Optional<String> bearerToken = getRequestApiKey(containerRequestContext);
+            // No Bearer Token present, hence no user can be authenticated
+            if (bearerToken.isEmpty()) {
+                return null;
+            }
+            
+            // Validate and verify provided Bearer Token, and retrieve UserRecordIdentifier
+            // TODO: Get the identifier from an invalidating cache to avoid lookup bursts of the same token. Tokens in the cache should be removed after some (configurable) time.
+            UserRecordIdentifier userInfo = verifyOidcBearerTokenAndGetUserIdentifier(bearerToken.get());
+
+            // retrieve Authenticated User from AuthService
+            AuthenticatedUser authUser = authSvc.lookupUser(userInfo);
+            if (authUser != null) {
+                // track the API usage
+                authUser = userSvc.updateLastApiUseTime(authUser);
+                return authUser;
+            } else {
+                // a valid Token was presented, but we have no associated user account.
+                logger.log(Level.WARNING, "Bearer token detected, OIDC provider {0} validated Token but no linked UserAccount", userInfo.getUserRepoId());
+                // TODO: Instead of returning null, we should throw a meaningful error to the client.
+                // Probably this will be a wrapped auth error response with an error code and a string describing the problem.
+                return null;
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Verifies the given Bearer token and obtains information about the corresponding user from the respective AuthProvider.
+     *
+     * @param token The string containing the encoded JWT
+     * @return the {@link UserRecordIdentifier} identifying the user within the respective AuthProvider
+     */
+    private UserRecordIdentifier verifyOidcBearerTokenAndGetUserIdentifier(String token) throws WrappedAuthErrorResponse {
+        try {
+            BearerAccessToken accessToken = BearerAccessToken.parse(token);
+            // Get list of all authentication providers using Open ID Connect
+            // @TASK: Limited to OIDCAuthProviders, could be widened to OAuth2Providers.
+            List<OIDCAuthProvider> providers = authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class).stream()
+                    .map(providerId -> (OIDCAuthProvider) authSvc.getAuthenticationProvider(providerId))
+                    .collect(Collectors.toUnmodifiableList());
+            // If no OIDC providers are configured, we cannot validate the token
+            if (providers.isEmpty()) {
+                logger.log(Level.WARNING, "Bearer token detected, no OIDC provider configured");
+                throw new WrappedAuthErrorResponse(BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED);
+            }
+
+            // Iterate over all OIDC providers if there are multiple. Sadly this is needed, as we do not know which provider issued the token.
+            for (OIDCAuthProvider provider : providers) {
+                try {
+                    // The OIDCAuthProvider needs to verify the Bearer Token and provide the means to identify the corresponding AuthenticatedUser.
+                    Optional<UserRecordIdentifier> userInfo = provider.getUserIdentifier(accessToken);
+                    if (userInfo.isPresent()) {
+                        logger.log(Level.FINE, "Bearer token detected, provider {0} confirmed validity and provided identifier", provider.getId());
+                        return userInfo.get();
+                    }
+                } catch (IOException e) {
+                    // TODO: Just logging this is not sufficient - if there is an IO error with the one provider
+                    //       that would have validated successfully, this is not the user's fault. We need to
+                    //       take note of such errors and refer to them later when they occur.
+                    logger.log(Level.FINE, "Bearer token detected, provider " + provider.getId() + " indicates an invalid Token, skipping", e);
+                }
+            }
+        } catch (ParseException e) {
+            logger.log(Level.FINE, "Bearer token detected, unable to parse bearer token (invalid Token)", e);
+            throw new WrappedAuthErrorResponse(INVALID_BEARER_TOKEN);
+        }
+
+        // No UserInfo returned means we have an invalid access token.
+        logger.log(Level.FINE, "Bearer token detected, yet no configured OIDC provider validated it.");
+        throw new WrappedAuthErrorResponse(UNAUTHORIZED_BEARER_TOKEN);
+    }
+
+    /**
+     * Retrieve the raw, encoded token value from the Authorization Bearer HTTP header as defined in RFC 6750
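+     * (i.e. a header of the form {@code Authorization: Bearer <token>}).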
+     * @return an {@link Optional} that is empty if no such header is present, or contains the raw header value otherwise
+     */
+    private Optional<String> getRequestApiKey(ContainerRequestContext containerRequestContext) {
+        String headerParamApiKey = containerRequestContext.getHeaderString(HttpHeaders.AUTHORIZATION);
+        if (headerParamApiKey != null && headerParamApiKey.toLowerCase().startsWith(BEARER_AUTH_SCHEME.toLowerCase() + " ")) {
+            return Optional.of(headerParamApiKey);
+        } else {
+            return Optional.empty();
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
new file mode 100644
index 00000000000..801e2752b9e
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanism.java
@@ -0,0 +1,50 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * @author Guillermo Portas
+ * Compound authentication mechanism that attempts to authenticate a user through the different authentication mechanisms (ordered by priority) of which it is composed.
+ * If no user is returned from any of the inner authentication mechanisms, a Guest user is returned.
+ */
+public class CompoundAuthMechanism implements AuthMechanism {
+
+    private final List<AuthMechanism> authMechanisms = new ArrayList<>();
+
+    @Inject
+    public CompoundAuthMechanism(ApiKeyAuthMechanism apiKeyAuthMechanism, WorkflowKeyAuthMechanism workflowKeyAuthMechanism, SignedUrlAuthMechanism signedUrlAuthMechanism, SessionCookieAuthMechanism sessionCookieAuthMechanism, BearerTokenAuthMechanism bearerTokenAuthMechanism) {
+        // Auth mechanisms should be ordered by priority here
+        add(apiKeyAuthMechanism, workflowKeyAuthMechanism, signedUrlAuthMechanism, sessionCookieAuthMechanism, bearerTokenAuthMechanism);
+    }
+
+    public CompoundAuthMechanism(AuthMechanism... authMechanisms) {
+        add(authMechanisms);
+    }
+
+    public void add(AuthMechanism... authMechanisms) {
+        this.authMechanisms.addAll(Arrays.asList(authMechanisms));
+    }
+
+    @Override
+    public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
+        User user = null;
+        for (AuthMechanism authMechanism : authMechanisms) {
+            User userFromRequest = authMechanism.findUserFromRequest(containerRequestContext);
+            if (userFromRequest != null) {
+                user = userFromRequest;
+                break;
+            }
+        }
+        if (user == null) {
+            user = GuestUser.get();
+        }
+        return user;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java
new file mode 100644
index 00000000000..c1471c3f5b3
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanism.java
@@ -0,0 +1,21 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+public class SessionCookieAuthMechanism implements AuthMechanism {
+    @Inject
+    DataverseSession session;
+
+    @Override
+    public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
+        if (FeatureFlags.API_SESSION_AUTH.enabled()) {
+            return session.getUser();
+        }
+        return null;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
new file mode 100644
index 00000000000..258661f6495
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanism.java
@@ -0,0 +1,84 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.ApiToken;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.UrlSignerUtil;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.UriInfo;
+
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
+
+import static edu.harvard.iq.dataverse.util.UrlSignerUtil.SIGNED_URL_TOKEN;
+import static edu.harvard.iq.dataverse.util.UrlSignerUtil.SIGNED_URL_USER;
+
+/**
+ * @author Guillermo Portas
+ * Authentication mechanism that attempts to authenticate a user from a Signed URL provided in an API request.
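+ * The URL signature is validated against the API token of the user referenced in the URL (see {@link edu.harvard.iq.dataverse.util.UrlSignerUtil}).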
+ */
+public class SignedUrlAuthMechanism implements AuthMechanism {
+
+    public static final String RESPONSE_MESSAGE_BAD_SIGNED_URL = "Bad signed URL";
+
+    @Inject
+    protected AuthenticationServiceBean authSvc;
+    @Inject
+    protected PrivateUrlServiceBean privateUrlSvc;
+    
+    @Override
+    public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
+        String signedUrlRequestParameter = getSignedUrlRequestParameter(containerRequestContext);
+        if (signedUrlRequestParameter == null) {
+            return null;
+        }
+        User user = getAuthenticatedUserFromSignedUrl(containerRequestContext);
+        if (user != null) {
+            return user;
+        }
+        throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_SIGNED_URL);
+    }
+
+    private String getSignedUrlRequestParameter(ContainerRequestContext containerRequestContext) {
+        return containerRequestContext.getUriInfo().getQueryParameters().getFirst(SIGNED_URL_TOKEN);
+    }
+
+    private User getAuthenticatedUserFromSignedUrl(ContainerRequestContext containerRequestContext) {
+        User user = null;
+        // The signedUrl contains a param telling which user this is supposed to be for.
+        // We don't trust this. So we look up that user, and get their API key, and use
+        // that as a secret in validating the signedURL. If the signature can't be
+        // validated with their key, the user (or their API key) has been changed and
+        // we reject the request.
+        UriInfo uriInfo = containerRequestContext.getUriInfo();
+        String userId = uriInfo.getQueryParameters().getFirst(SIGNED_URL_USER);
+        User targetUser = null; 
+        ApiToken userApiToken = null;
+        if (!userId.startsWith(PrivateUrlUser.PREFIX)) {
+            targetUser = authSvc.getAuthenticatedUser(userId);
+            userApiToken = authSvc.findApiTokenByUser((AuthenticatedUser) targetUser);
+        } else {
+            PrivateUrl privateUrl = privateUrlSvc.getPrivateUrlFromDatasetId(Long.parseLong(userId.substring(PrivateUrlUser.PREFIX.length())));
+            userApiToken = new ApiToken();
+            userApiToken.setTokenString(privateUrl.getToken());
+            targetUser = privateUrlSvc.getPrivateUrlUserFromToken(privateUrl.getToken());
+        }
+        if (targetUser != null && userApiToken != null) {
+            String signedUrl = URLDecoder.decode(uriInfo.getRequestUri().toString(), StandardCharsets.UTF_8);
+            String requestMethod = containerRequestContext.getMethod();
+            String signedUrlSigningKey = JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + userApiToken.getTokenString();
+            boolean isSignedUrlValid = UrlSignerUtil.isValidUrl(signedUrl, userId, requestMethod, signedUrlSigningKey);
+            if (isSignedUrlValid) {
+                user = targetUser;
+            }
+        }
+        return user;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
new file mode 100644
index 00000000000..bbd67713e85
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanism.java
@@ -0,0 +1,42 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+
+import jakarta.inject.Inject;
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+/**
+ * @author Guillermo Portas
+ * Authentication mechanism that attempts to authenticate a user from a Workflow Key provided in an API request.
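+ * The key can be supplied either in the {@value #DATAVERSE_WORKFLOW_KEY_REQUEST_HEADER_NAME} request header
+ * or in the {@value #DATAVERSE_WORKFLOW_KEY_REQUEST_PARAM_NAME} query parameter; the header takes precedence.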
+ */
+public class WorkflowKeyAuthMechanism implements AuthMechanism {
+
+    public static final String DATAVERSE_WORKFLOW_KEY_REQUEST_HEADER_NAME = "X-Dataverse-invocationID";
+    public static final String DATAVERSE_WORKFLOW_KEY_REQUEST_PARAM_NAME = "invocationID";
+    public static final String RESPONSE_MESSAGE_BAD_WORKFLOW_KEY = "Bad workflow invocationID";
+
+    @Inject
+    protected AuthenticationServiceBean authSvc;
+
+    @Override
+    public User findUserFromRequest(ContainerRequestContext containerRequestContext) throws WrappedAuthErrorResponse {
+        String workflowKey = getRequestWorkflowKey(containerRequestContext);
+        if (workflowKey == null) {
+            return null;
+        }
+        AuthenticatedUser authUser = authSvc.lookupUserForWorkflowInvocationID(workflowKey);
+        if (authUser != null) {
+            return authUser;
+        }
+        throw new WrappedAuthErrorResponse(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY);
+    }
+
+    private String getRequestWorkflowKey(ContainerRequestContext containerRequestContext) {
+        String headerParamWorkflowKey = containerRequestContext.getHeaderString(DATAVERSE_WORKFLOW_KEY_REQUEST_HEADER_NAME);
+        String queryParamWorkflowKey = containerRequestContext.getUriInfo().getQueryParameters().getFirst(DATAVERSE_WORKFLOW_KEY_REQUEST_PARAM_NAME);
+
+        return headerParamWorkflowKey != null ? headerParamWorkflowKey : queryParamWorkflowKey;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
new file mode 100644
index 00000000000..40431557261
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/auth/WrappedAuthErrorResponse.java
@@ -0,0 +1,30 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.api.ApiConstants;
+import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
+
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+
+public class WrappedAuthErrorResponse extends Exception {
+
+    private final String message;
+    private final Response response;
+
+    public WrappedAuthErrorResponse(String message) {
+        this.message = message;
+        this.response = Response.status(Response.Status.UNAUTHORIZED)
+                .entity(NullSafeJsonBuilder.jsonObjectBuilder()
+                        .add("status", ApiConstants.STATUS_ERROR)
+                        .add("message", message).build()
+                ).type(MediaType.APPLICATION_JSON_TYPE).build();
+    }
+
+    public String getMessage() {
+        return this.message;
+    }
+
+    public Response getResponse() {
+        return response;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java
index 37c29f20efe..09a60b1b700 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/BatchJobResource.java
@@ -4,17 +4,17 @@
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
 import edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity;
 
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.JobExecution;
-import javax.batch.runtime.JobInstance;
-import javax.ejb.Stateless;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.JobExecution;
+import jakarta.batch.runtime.JobInstance;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java
index 688a0267085..b7a6b7cfafd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/batchjob/FileRecordJobResource.java
@@ -3,21 +3,24 @@
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.PermissionServiceBean;
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode;
 import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ws.rs.DefaultValue;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ws.rs.DefaultValue;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 
 @Stateless
 @Path("batch/jobs")
@@ -33,13 +36,15 @@ public class FileRecordJobResource extends AbstractApiBean {
     DatasetServiceBean datasetService;
 
     @POST
+    @AuthRequired
     @Path("import/datasets/files/{identifier}")
     @Produces(MediaType.APPLICATION_JSON)
-    public Response getFilesystemImport(@PathParam("identifier") String identifier,
-            @QueryParam("mode") @DefaultValue("MERGE") String mode,
-            /*@QueryParam("fileMode") @DefaultValue("package_file") String fileMode*/
-            @QueryParam("uploadFolder") String uploadFolder,
-            @QueryParam("totalSize") Long totalSize) {
+    public Response getFilesystemImport(@Context ContainerRequestContext crc,
+                                        @PathParam("identifier") String identifier,
+                                        @QueryParam("mode") @DefaultValue("MERGE") String mode,
+                                        /*@QueryParam("fileMode") @DefaultValue("package_file") String fileMode*/
+                                        @QueryParam("uploadFolder") String uploadFolder,
+                                        @QueryParam("totalSize") Long totalSize) {
         return response(req -> {
             ImportMode importMode = ImportMode.MERGE;
             // Switch to this if you ever need to use something other than MERGE.
@@ -53,7 +58,7 @@ public Response getFilesystemImport(@PathParam("identifier") String identifier,
                     .add("message", returnString)
                     .add("executionId", jsonObject.getInt("executionId"))
             );
-        });
+        }, getRequestUser(crc));
     }
 
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
index 6543d771ebe..5bc50903be8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionDepositManagerImpl.java
@@ -18,12 +18,12 @@
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 import org.apache.abdera.parser.ParseException;
 import org.swordapp.server.AuthCredentials;
 import org.swordapp.server.CollectionDepositManager;
@@ -174,7 +174,9 @@ public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCrede
                     // curl --insecure --data-binary "@multipart.dat" -H 'Content-Type: multipart/related; boundary="===============0670350989=="' -H "MIME-Version: 1.0" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/hdl:1902.1/12345
                     // but...
                     // "Yeah, multipart is critically broken across all implementations" -- http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00327.html
-                    throw new UnsupportedOperationException("Not yet implemented");
+                    //
+                    // OB 2022-03-24 -> sword2-server v2.0 library drops support for multipart/related.
+                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Multipart/related RFC2387 type posts are not supported. Please POST an Atom entry instead.");
                 } else {
                     throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "expected deposit types are isEntryOnly, isBinaryOnly, and isMultiPart");
                 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
index 2f3777ed6ab..541fa144e80 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/CollectionListManagerImpl.java
@@ -11,9 +11,9 @@
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
 import javax.xml.namespace.QName;
 import org.apache.abdera.Abdera;
 import org.apache.abdera.i18n.iri.IRI;
@@ -78,8 +78,8 @@ public Feed listCollectionContents(IRI iri, AuthCredentials authCredentials, Swo
                     if (!permissionService.isUserAllowedOn(user, new UpdateDatasetVersionCommand(dataset, dvReq), dataset)) {
                         continue;
                     }
-                    String editUri = baseUrl + "/edit/study/" + dataset.getGlobalIdString();
-                    String editMediaUri = baseUrl + "/edit-media/study/" + dataset.getGlobalIdString();
+                    String editUri = baseUrl + "/edit/study/" + dataset.getGlobalId().asString();
+                    String editMediaUri = baseUrl + "/edit-media/study/" + dataset.getGlobalId().asString();
                     Entry entry = feed.addEntry();
                     entry.setId(editUri);
                     entry.setTitle(datasetService.getTitleFromLatestVersion(dataset.getId()));
@@ -94,7 +94,7 @@ public Feed listCollectionContents(IRI iri, AuthCredentials authCredentials, Swo
                 throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias);
             }
         } else {
-            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifer from URL: " + iri);
+            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't determine target type or identifier from URL: " + iri);
         }
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
index 8fb55a8eaf6..4d4d1d08b51 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ContainerManagerImpl.java
@@ -25,12 +25,12 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.abdera.parser.ParseException;
 import org.swordapp.server.AuthCredentials;
 import org.swordapp.server.ContainerManager;
@@ -86,7 +86,7 @@ public DepositReceipt getEntry(String uri, Map<String, String> map, AuthCredenti
                 Dataset dataset = datasetService.findByGlobalId(globalId);
                 if (dataset != null) {
                     if (!permissionService.isUserAllowedOn(user, new GetDraftDatasetVersionCommand(dvReq, dataset), dataset)) {
-                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to retrieve entry for " + dataset.getGlobalIdString());
+                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to retrieve entry for " + dataset.getGlobalId().asString());
                     }
                     Dataverse dvThatOwnsDataset = dataset.getOwner();
                     ReceiptGenerator receiptGenerator = new ReceiptGenerator();
@@ -228,21 +228,21 @@ public void deleteContainer(String uri, AuthCredentials authCredentials, SwordCo
                         DatasetVersion.VersionState datasetVersionState = dataset.getLatestVersion().getVersionState();
                         if (dataset.isReleased()) {
                             if (datasetVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
-                                logger.info("destroying working copy version of dataset " + dataset.getGlobalIdString());
+                                logger.info("destroying working copy version of dataset " + dataset.getGlobalId().asString());
                                 try {
                                     engineSvc.submit(deleteDatasetVersionCommand);
                                 } catch (CommandException ex) {
-                                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset version for " + dataset.getGlobalIdString() + ": " + ex);
+                                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Can't delete dataset version for " + dataset.getGlobalId().asString() + ": " + ex);
                                 }
                                 logger.info("dataset version deleted for dataset id " + dataset.getId());
                             } else if (datasetVersionState.equals(DatasetVersion.VersionState.RELEASED)) {
                                 throw new SwordError(UriRegistry.ERROR_METHOD_NOT_ALLOWED, "Deaccessioning a dataset is no longer supported as of Data Deposit API version in URL (" + swordConfiguration.getBaseUrlPathV1() + ") Equivalent functionality is being developed at https://github.com/IQSS/dataverse/issues/778");
                             } else if (datasetVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
-                                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of dataset " + dataset.getGlobalIdString() + " has already been deaccessioned.");
+                                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of dataset " + dataset.getGlobalId().asString() + " has already been deaccessioned.");
                             } else if (datasetVersionState.equals(DatasetVersion.VersionState.ARCHIVED)) {
-                                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Lastest version of dataset " + dataset.getGlobalIdString() + " has been archived and can not be deleted or deaccessioned.");
+                                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Latest version of dataset " + dataset.getGlobalId().asString() + " has been archived and cannot be deleted or deaccessioned.");
                             } else {
-                                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for dataset " + dataset.getGlobalIdString() + " in state " + datasetVersionState);
+                                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Operation not valid for dataset " + dataset.getGlobalId().asString() + " in state " + datasetVersionState);
                             }
                             /**
                              * @todo Reformat else below properly so you can
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
index 5491024c73c..3f5345d8e0d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
@@ -6,20 +6,21 @@
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.PermissionServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil;
 import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
+import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
-import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
@@ -29,12 +30,12 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.swordapp.server.AuthCredentials;
@@ -69,6 +70,8 @@ public class MediaResourceManagerImpl implements MediaResourceManager {
     SwordAuth swordAuth;
     @Inject
     UrlManager urlManager;
+    @Inject
+    DataverseRequestServiceBean dvRequestService;
 
     private HttpServletRequest httpRequest;
 
@@ -111,7 +114,7 @@ public MediaResource getMediaResourceRepresentation(String uri, Map<String, Stri
                         MediaResource mediaResource = new MediaResource(fixmeInputStream, contentType, packaging, isPackaged);
                         return mediaResource;
                     } else {
-                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to get a media resource representation of the dataset with global ID " + dataset.getGlobalIdString());
+                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to get a media resource representation of the dataset with global ID " + dataset.getGlobalId().asString());
                     }
                 } else {
                     throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Downloading files via the SWORD-based Dataverse Data Deposit API is not (yet) supported: https://github.com/IQSS/dataverse/issues/183");
@@ -216,7 +219,7 @@ public void deleteMediaResource(String uri, AuthCredentials authCredentials, Swo
                 throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported file type found in URL: " + uri);
             }
         } else {
-            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifer not specified in URL: " + uri);
+            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Target or identifier not specified in URL: " + uri);
         }
     }
 
@@ -243,7 +246,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au
             }
             UpdateDatasetVersionCommand updateDatasetCommand = new UpdateDatasetVersionCommand(dataset, dvReq);
             if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
-                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataset with global ID " + dataset.getGlobalIdString());
+                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataset with global ID " + dataset.getGlobalId().asString());
             }
             
             //---------------------------------------
@@ -301,37 +304,42 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au
              */
             String guessContentTypeForMe = null;
             List<DataFile> dataFiles = new ArrayList<>();
+
             try {
-                try {
-                    CreateDataFileResult createDataFilesResponse =  FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig);
-                    dataFiles = createDataFilesResponse.getDataFiles();
-                } catch (EJBException ex) {
-                    Throwable cause = ex.getCause();
-                    if (cause != null) {
-                        if (cause instanceof IllegalArgumentException) {
-                            /**
-                             * @todo should be safe to remove this catch of
-                             * EJBException and IllegalArgumentException once
-                             * this ticket is resolved:
-                             *
-                             * IllegalArgumentException: MALFORMED when
-                             * uploading certain zip files
-                             * https://github.com/IQSS/dataverse/issues/1021
-                             */
-                            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. Problem with zip file, perhaps: " + cause);
-                        } else {
-                            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + cause);
-                        }
+                //CreateDataFileResult createDataFilesResponse =  FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, null, systemConfig);
+                UploadSessionQuotaLimit quota = null; 
+                if (systemConfig.isStorageQuotasEnforced()) {
+                    quota = dataFileService.getUploadSessionQuotaLimit(dataset);
+                }
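+                // Illustrative note: quota remains null when storage quotas are not
+                // enforced; the command below is assumed to treat a null quota as "no limit".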
+                Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvReq, editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, null, quota, null);
+                CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
+                dataFiles = createDataFilesResult.getDataFiles();
+            } catch (CommandException ex) {
+                Throwable cause = ex.getCause();
+                if (cause != null) {
+                    if (cause instanceof IllegalArgumentException) {
+                        /**
+                         * @todo should be safe to remove this catch of
+                         * EJBException and IllegalArgumentException once this
+                         * ticket is resolved:
+                         *
+                         * IllegalArgumentException: MALFORMED when uploading
+                         * certain zip files
+                         * https://github.com/IQSS/dataverse/issues/1021
+                         */
+                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset. Problem with zip file, perhaps: " + cause);
                     } else {
-                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. No cause: " + ex.getMessage());
+                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + cause);
                     }
-                } /*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) {
+                } else {
+                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage());
+                }
+            }
+            /*TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) {
                     throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + ex.getMessage());
                     //Logger.getLogger(MediaResourceManagerImpl.class.getName()).log(Level.SEVERE, null, ex);
-                }*/
-            } catch (IOException ex) {
-                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage());
-            }
+            }*/
+            
             if (!dataFiles.isEmpty()) {
                 Set<ConstraintViolation> constraintViolations = editVersion.validate();
                 if (constraintViolations.size() > 0) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java
index 69728d0bc96..37a59fe6365 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ReceiptGenerator.java
@@ -13,7 +13,7 @@ public class ReceiptGenerator {
     DepositReceipt createDatasetReceipt(String baseUrl, Dataset dataset) {
         logger.fine("baseUrl was: " + baseUrl);
         DepositReceipt depositReceipt = new DepositReceipt();
-        String globalId = dataset.getGlobalIdString();
+        String globalId = dataset.getGlobalId().asString();
         String editIri = baseUrl + "/edit/study/" + globalId;
         depositReceipt.setEditIRI(new IRI(editIri));
         /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java
index a761afd1324..c509a8d6f52 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2CollectionServlet.java
@@ -2,10 +2,10 @@
 
 import java.io.IOException;
 import java.util.concurrent.locks.ReentrantLock;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.CollectionAPI;
 import org.swordapp.server.servlets.SwordServlet;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java
index d8ba8eec4ca..53dce24c0fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ContainerServlet.java
@@ -2,10 +2,10 @@
 
 import java.io.IOException;
 import java.util.concurrent.locks.ReentrantLock;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.ContainerAPI;
 import org.swordapp.server.ContainerManager;
 import org.swordapp.server.StatementManager;
@@ -17,9 +17,11 @@ public class SWORDv2ContainerServlet extends SwordServlet {
     ContainerManagerImpl containerManagerImpl;
     @Inject
     StatementManagerImpl statementManagerImpl;
-    private ContainerManager cm;
+    // this field can be replaced by a local variable
+//    private ContainerManager cm;
     private ContainerAPI api;
-    private StatementManager sm;
+    // this field can be replaced by a local variable
+//    private StatementManager sm;
     private final ReentrantLock lock = new ReentrantLock();
     
     
@@ -28,13 +30,15 @@ public void init() throws ServletException {
         super.init();
 
         // load the container manager implementation
-        this.cm = containerManagerImpl;
-
-        // load the statement manager implementation
-        this.sm = statementManagerImpl;
+//        this.cm = containerManagerImpl;
+        ContainerManager cm = containerManagerImpl;
+        // load the statement manager implementation
+//        this.sm = statementManagerImpl;
+        StatementManager sm = statementManagerImpl;
 
         // initialise the underlying servlet processor
-        this.api = new ContainerAPI(this.cm, this.sm, this.config);
+//        this.api = new ContainerAPI(this.cm, this.sm, this.config);
+        this.api = new ContainerAPI(cm, sm, this.config);
     }
 
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java
index c455a6fd26a..245ab6ab23b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2MediaResourceServlet.java
@@ -2,10 +2,10 @@
 
 import java.io.IOException;
 import java.util.concurrent.locks.ReentrantLock;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.MediaResourceAPI;
 import org.swordapp.server.servlets.SwordServlet;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java
index 37db76d3c9c..eab005d87fa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2ServiceDocumentServlet.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api.datadeposit;
 
 import java.io.IOException;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.ServiceDocumentAPI;
 import org.swordapp.server.servlets.SwordServlet;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java
index ed1202d8c77..4bcc9c6afe8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SWORDv2StatementServlet.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api.datadeposit;
 
 import java.io.IOException;
-import javax.inject.Inject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.inject.Inject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import org.swordapp.server.StatementAPI;
 import org.swordapp.server.StatementManager;
 import org.swordapp.server.servlets.SwordServlet;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
index 049b20f605b..134d54aef88 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/ServiceDocumentManagerImpl.java
@@ -8,8 +8,8 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 
 import org.apache.commons.lang3.StringUtils;
 import org.swordapp.server.AuthCredentials;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
index 9fb372d7121..95763e0eafb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/StatementManagerImpl.java
@@ -17,9 +17,9 @@
 import java.util.Optional;
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.joining;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.abdera.i18n.iri.IRI;
 import org.apache.abdera.i18n.iri.IRISyntaxException;
 import org.apache.abdera.model.AtomDate;
@@ -68,7 +68,7 @@ public Statement getStatement(String editUri, Map<String, String> map, AuthCrede
             if (!permissionService.isUserAllowedOn(user, new GetDraftDatasetVersionCommand(dvReq, dataset), dataset)) {
                 throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to view dataset with global ID " + globalId);
             }
-            String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + dataset.getGlobalIdString();
+            String feedUri = urlManager.getHostnamePlusBaseUrlPath(editUri) + "/edit/study/" + dataset.getGlobalId().asString();
             String author = dataset.getLatestVersion().getAuthorsStr();
             String title = dataset.getLatestVersion().getTitle();
             // in the statement, the element is called "updated"
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
index 1e506c6a0b1..a5564e9fbdb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordConfigurationImpl.java
@@ -6,7 +6,7 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import org.swordapp.server.SwordConfiguration;
 
 public class SwordConfigurationImpl implements SwordConfiguration {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java
new file mode 100644
index 00000000000..aa7e028a4ba
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordFilter.java
@@ -0,0 +1,49 @@
+package edu.harvard.iq.dataverse.api.datadeposit;
+
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequestWrapper;
+import java.io.IOException;
+
+public class SwordFilter implements Filter {
+
+    @Override
+    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
+        HttpServletRequest req = (HttpServletRequest) request;
+        MutateHeaders requestWrapper = new MutateHeaders(req);
+        chain.doFilter(requestWrapper, response);
+    }
+
+    /**
+     * We are mutating headers because Payara 6 is stricter than Payara 5 and
+     * wants "attachment; filename=" instead of just "filename=". In order to
+     * not break backward compatibility, we add "attachment; " for our (SWORD)
+     * API users. (This only seems to affect our SWORD API.) That is, they can
+     * continue to send '-H "Content-Disposition: filename=example.zip"' as
+     * we've documented for years.
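+     *
+     * For example (illustrative only), a request header sent as
+     *   Content-Disposition: filename=example.zip
+     * is rewritten so the SWORD library sees
+     *   Content-Disposition: attachment; filename=example.zip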
+     */
+    public class MutateHeaders extends HttpServletRequestWrapper {
+
+        public MutateHeaders(HttpServletRequest request) {
+            super(request);
+        }
+
+        // inspired by https://stackoverflow.com/questions/2811769/adding-an-http-header-to-the-request-in-a-servlet-filter/2811841#2811841
+        @Override
+        public String getHeader(String name) {
+            String header = super.getHeader(name);
+            if ("Content-Disposition".equalsIgnoreCase(name)) {
+                if (header != null && header.startsWith("filename=")) {
+                    header = header.replaceFirst("filename=", "attachment; filename=");
+                }
+            }
+            return (header != null) ? header : super.getParameter(name);
+        }
+
+    }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
index 2e093dbcf36..22b6ee05e48 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/SwordServiceBean.java
@@ -19,10 +19,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.swordapp.server.SwordEntry;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java
index d5be8f72fce..3fc31730ba2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DatasetDTO.java
@@ -18,8 +18,6 @@ public class DatasetDTO implements java.io.Serializable {
         private String metadataLanguage;
         private DatasetVersionDTO datasetVersion;
         private List<DataFileDTO> dataFiles;
-	public static final String DOI_PROTOCOL = "doi";
-	public static final String HDL_PROTOCOL = "hdl";
 
     public String getId() {
         return id;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
index 4cbf31d1d2c..bb57059a99a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ConstraintViolationExceptionHandler.java
@@ -1,17 +1,15 @@
 package edu.harvard.iq.dataverse.api.errorhandlers;
 
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
-
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 import java.util.List;
 import java.util.stream.Collectors;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
index 286272d9de3..2f974a1c5be 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/JsonParseExceptionHandler.java
@@ -2,17 +2,14 @@
 
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 
-import javax.json.Json;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
-import java.util.UUID;
-import java.util.logging.Level;
-import java.util.logging.Logger;
+import jakarta.json.Json;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 
 /**
  * Make a failing JSON parsing request appear to be a BadRequest (error code 400)
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java
index 4064ee21474..8e43a1876bf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/ThrowableHandler.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.api.util.JsonResponseBuilder;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java
index 5f28bfd0afc..e67e91e63c9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/errorhandlers/WebApplicationExceptionHandler.java
@@ -8,12 +8,12 @@
 import edu.harvard.iq.dataverse.api.util.JsonResponseBuilder;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import javax.ws.rs.ext.Provider;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.WebApplicationException;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.ext.ExceptionMapper;
+import jakarta.ws.rs.ext.Provider;
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java
index fc96215cef0..2bea36a6047 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldMap.java
@@ -7,14 +7,14 @@
 package edu.harvard.iq.dataverse.api.imports;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java
index e7b8e71495b..240baeefcff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/CustomFieldServiceBean.java
@@ -5,9 +5,9 @@
  */
 package edu.harvard.iq.dataverse.api.imports;
 
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
index d9433832309..73a83035fc5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportDDIServiceBean.java
@@ -22,15 +22,14 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
 import javax.xml.stream.XMLInputFactory;
 
-import edu.harvard.iq.dataverse.util.json.ControlledVocabularyException;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -1266,24 +1265,26 @@ private void parseVersionNumber(DatasetVersionDTO dvDTO, String versionNumber) {
 
     }
    
-   private void processSerStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException {
-          FieldDTO seriesName=null;
-          FieldDTO seriesInformation=null;
-          for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
+    private void processSerStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException {
+        FieldDTO seriesInformation = null;
+        FieldDTO seriesName = null;
+        for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {            
             if (event == XMLStreamConstants.START_ELEMENT) {
+                if (xmlr.getLocalName().equals("serInfo")) {
+                     seriesInformation = FieldDTO.createPrimitiveFieldDTO("seriesInformation", parseText(xmlr));
+                }
                 if (xmlr.getLocalName().equals("serName")) {
-                   seriesName = FieldDTO.createPrimitiveFieldDTO("seriesName", parseText(xmlr));
-                  
-                } else if (xmlr.getLocalName().equals("serInfo")) {
-                    seriesInformation=FieldDTO.createPrimitiveFieldDTO("seriesInformation", parseText(xmlr) );
+                     seriesName = FieldDTO.createPrimitiveFieldDTO("seriesName", parseText(xmlr));
                 }
             } else if (event == XMLStreamConstants.END_ELEMENT) {
                 if (xmlr.getLocalName().equals("serStmt")) {
-                    citation.getFields().add(FieldDTO.createCompoundFieldDTO("series",seriesName,seriesInformation ));
+                    if (seriesInformation != null || seriesName != null) {
+                        citation.addField(FieldDTO.createMultipleCompoundFieldDTO("series", seriesName, seriesInformation ));
+                    }
                     return;
                 }
             }
-        }
+        }     
     }
 
     private void processDistStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) throws XMLStreamException {
@@ -1337,6 +1338,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th
         List<HashSet<FieldDTO>> producers = new ArrayList<>();
         List<HashSet<FieldDTO>> grants = new ArrayList<>();
         List<HashSet<FieldDTO>> software = new ArrayList<>();
+        List<String> prodPlac = new ArrayList<>();
 
         for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
             if (event == XMLStreamConstants.START_ELEMENT) {
@@ -1352,9 +1354,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th
                 } else if (xmlr.getLocalName().equals("prodDate")) {
                     citation.getFields().add(FieldDTO.createPrimitiveFieldDTO("productionDate", parseDate(xmlr, "prodDate")));
                 } else if (xmlr.getLocalName().equals("prodPlac")) {
-                    List<String> prodPlac = new ArrayList<>();
-                    prodPlac.add(parseText(xmlr, "prodPlac"));
-                    citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac));
+                    prodPlac.add(parseText(xmlr));
                 } else if (xmlr.getLocalName().equals("software")) {
                     HashSet<FieldDTO> set = new HashSet<>();
                     addToSet(set,"softwareVersion", xmlr.getAttributeValue(null, "version"));
@@ -1387,6 +1387,9 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th
                     if (producers.size()>0) {
                         citation.getFields().add(FieldDTO.createMultipleCompoundFieldDTO("producer", producers));
                     }
+                    if (prodPlac.size() > 0) {
+                        citation.getFields().add(FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.productionPlace, prodPlac));
+                    }
                     return;
                 }
             }
@@ -1396,6 +1399,7 @@ private void processProdStmt(XMLStreamReader xmlr, MetadataBlockDTO citation) th
    private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws XMLStreamException, ImportException {
        MetadataBlockDTO citation = datasetDTO.getDatasetVersion().getMetadataBlocks().get("citation");
        List<HashSet<FieldDTO>> otherIds = new ArrayList<>();
+       List<String> altTitles = new ArrayList<>();
        
        for (int event = xmlr.next(); event != XMLStreamConstants.END_DOCUMENT; event = xmlr.next()) {
             if (event == XMLStreamConstants.START_ELEMENT) {
@@ -1406,8 +1410,7 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws
                   FieldDTO field = FieldDTO.createPrimitiveFieldDTO("subtitle", parseText(xmlr));
                    citation.getFields().add(field);
                 } else if (xmlr.getLocalName().equals("altTitl")) {
-                  FieldDTO field = FieldDTO.createPrimitiveFieldDTO("alternativeTitle", parseText(xmlr));
-                   citation.getFields().add(field);
+                    altTitles.add(parseText(xmlr));
                 } else if (xmlr.getLocalName().equals("IDNo")) {
                     if ( AGENCY_HANDLE.equals( xmlr.getAttributeValue(null, "agency") ) || AGENCY_DOI.equals( xmlr.getAttributeValue(null, "agency") ) ) {
                         importGenericService.reassignIdentifierAsGlobalId(parseText(xmlr), datasetDTO);
@@ -1435,6 +1438,10 @@ private void processTitlStmt(XMLStreamReader xmlr, DatasetDTO datasetDTO) throws
                     if (otherIds.size()>0) {
                         citation.addField(FieldDTO.createMultipleCompoundFieldDTO("otherId", otherIds));
                     }
+                    if (!altTitles.isEmpty()) {
+                        FieldDTO field = FieldDTO.createMultiplePrimitiveFieldDTO(DatasetFieldConstant.alternativeTitle, altTitles);
+                        citation.getFields().add(field);
+                    }
                     return;
                 }
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
index c651db2dfae..f7a6cf54dd5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBean.java
@@ -1,6 +1,9 @@
 package edu.harvard.iq.dataverse.api.imports;
 
 import com.google.gson.Gson;
+
+import edu.harvard.iq.dataverse.DOIServiceBean;
+import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
 import edu.harvard.iq.dataverse.DatasetFieldConstant;
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
@@ -8,12 +11,13 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.ForeignMetadataFieldMapping;
 import edu.harvard.iq.dataverse.ForeignMetadataFormatMapping;
-import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
 import edu.harvard.iq.dataverse.api.dto.*;  
 import edu.harvard.iq.dataverse.api.dto.FieldDTO;
 import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
@@ -27,19 +31,19 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 import javax.xml.stream.XMLInputFactory;
 import net.handle.hdllib.HandleException;
 import net.handle.hdllib.HandleResolver;
@@ -348,7 +352,7 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) {
         if (!otherIds.isEmpty()) {
             // We prefer doi or hdl identifiers like "doi:10.7910/DVN/1HE30F"
             for (String otherId : otherIds) {
-                if (otherId.startsWith(GlobalId.DOI_PROTOCOL) || otherId.startsWith(GlobalId.HDL_PROTOCOL) || otherId.startsWith(GlobalId.DOI_RESOLVER_URL) || otherId.startsWith(GlobalId.HDL_RESOLVER_URL) || otherId.startsWith(GlobalId.HTTP_DOI_RESOLVER_URL) || otherId.startsWith(GlobalId.HTTP_HDL_RESOLVER_URL) || otherId.startsWith(GlobalId.DXDOI_RESOLVER_URL) || otherId.startsWith(GlobalId.HTTP_DXDOI_RESOLVER_URL)) {
+                if (otherId.startsWith(DOIServiceBean.DOI_PROTOCOL) || otherId.startsWith(HandlenetServiceBean.HDL_PROTOCOL) || otherId.startsWith(DOIServiceBean.DOI_RESOLVER_URL) || otherId.startsWith(HandlenetServiceBean.HDL_RESOLVER_URL) || otherId.startsWith(DOIServiceBean.HTTP_DOI_RESOLVER_URL) || otherId.startsWith(HandlenetServiceBean.HTTP_HDL_RESOLVER_URL) || otherId.startsWith(DOIServiceBean.DXDOI_RESOLVER_URL) || otherId.startsWith(DOIServiceBean.HTTP_DXDOI_RESOLVER_URL)) {
                     return otherId;
                 }
             }
@@ -357,7 +361,7 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) {
                 try {
                     HandleResolver hr = new HandleResolver();
                     hr.resolveHandle(otherId);
-                    return GlobalId.HDL_PROTOCOL + ":" + otherId;
+                    return HandlenetServiceBean.HDL_PROTOCOL + ":" + otherId;
                 } catch (HandleException e) {
                     logger.fine("Not a valid handle: " + e.toString());
                 }
@@ -371,6 +375,8 @@ private String getOtherIdFromDTO(DatasetVersionDTO datasetVersionDTO) {
      * protocol/authority/identifier parts that are assigned to the datasetDTO.
      * The name reflects the original purpose but it is now used in ImportDDIServiceBean as well.
      */
+    
+    //ToDo - sync with GlobalId.parsePersistentId(String) ? - that currently doesn't do URL forms, but could
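+    // Illustrative examples of identifier forms handled by the branches below (values are placeholders):
+    //   doi:10.12345/XXXXX, hdl:12345/XXXXX, perma:XXXXX,
+    //   https://doi.org/10.12345/XXXXX, https://hdl.handle.net/12345/XXXXX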
     public String reassignIdentifierAsGlobalId(String identifierString, DatasetDTO datasetDTO) {
 
         int index1 = identifierString.indexOf(':');
@@ -382,24 +388,29 @@ public String reassignIdentifierAsGlobalId(String identifierString, DatasetDTO d
        
         String protocol = identifierString.substring(0, index1);
         
-        if (GlobalId.DOI_PROTOCOL.equals(protocol) || GlobalId.HDL_PROTOCOL.equals(protocol)) {
-            logger.fine("Processing hdl:- or doi:-style identifier : "+identifierString);        
+        if (DOIServiceBean.DOI_PROTOCOL.equals(protocol) || HandlenetServiceBean.HDL_PROTOCOL.equals(protocol) || PermaLinkPidProviderServiceBean.PERMA_PROTOCOL.equals(protocol)) {
+            logger.fine("Processing hdl:- or doi:- or perma:-style identifier : "+identifierString);        
         
         } else if ("http".equalsIgnoreCase(protocol) || "https".equalsIgnoreCase(protocol)) {
             
             // We also recognize global identifiers formatted as global resolver URLs:
-            
-            if (identifierString.startsWith(GlobalId.HDL_RESOLVER_URL) || identifierString.startsWith(GlobalId.HTTP_HDL_RESOLVER_URL)) {
+            //ToDo - refactor index1 always has -1 here so that we can use index1+1 later
+            //ToDo - single map of protocol/url, are all three cases the same then?
+            if (identifierString.startsWith(HandlenetServiceBean.HDL_RESOLVER_URL) || identifierString.startsWith(HandlenetServiceBean.HTTP_HDL_RESOLVER_URL)) {
                 logger.fine("Processing Handle identifier formatted as a resolver URL: "+identifierString);
-                protocol = GlobalId.HDL_PROTOCOL;
-                index1 = (identifierString.startsWith(GlobalId.HDL_RESOLVER_URL)) ? GlobalId.HDL_RESOLVER_URL.length() - 1 : GlobalId.HTTP_HDL_RESOLVER_URL.length() - 1;
+                protocol = HandlenetServiceBean.HDL_PROTOCOL;
+                index1 = (identifierString.startsWith(HandlenetServiceBean.HDL_RESOLVER_URL)) ? HandlenetServiceBean.HDL_RESOLVER_URL.length() - 1 : HandlenetServiceBean.HTTP_HDL_RESOLVER_URL.length() - 1;
                 index2 = identifierString.indexOf("/", index1 + 1);
-            } else if (identifierString.startsWith(GlobalId.DOI_RESOLVER_URL) || identifierString.startsWith(GlobalId.HTTP_DOI_RESOLVER_URL) || identifierString.startsWith(GlobalId.DXDOI_RESOLVER_URL) || identifierString.startsWith(GlobalId.HTTP_DXDOI_RESOLVER_URL)) {
+            } else if (identifierString.startsWith(DOIServiceBean.DOI_RESOLVER_URL) || identifierString.startsWith(DOIServiceBean.HTTP_DOI_RESOLVER_URL) || identifierString.startsWith(DOIServiceBean.DXDOI_RESOLVER_URL) || identifierString.startsWith(DOIServiceBean.HTTP_DXDOI_RESOLVER_URL)) {
                 logger.fine("Processing DOI identifier formatted as a resolver URL: "+identifierString);
-                protocol = GlobalId.DOI_PROTOCOL;
-                identifierString = identifierString.replace(GlobalId.DXDOI_RESOLVER_URL, GlobalId.DOI_RESOLVER_URL);
-                identifierString = identifierString.replace(GlobalId.HTTP_DXDOI_RESOLVER_URL, GlobalId.HTTP_DOI_RESOLVER_URL);
-                index1 = (identifierString.startsWith(GlobalId.DOI_RESOLVER_URL)) ? GlobalId.DOI_RESOLVER_URL.length() - 1 : GlobalId.HTTP_DOI_RESOLVER_URL.length() - 1;
+                protocol = DOIServiceBean.DOI_PROTOCOL;
+                identifierString = identifierString.replace(DOIServiceBean.DXDOI_RESOLVER_URL, DOIServiceBean.DOI_RESOLVER_URL);
+                identifierString = identifierString.replace(DOIServiceBean.HTTP_DXDOI_RESOLVER_URL, DOIServiceBean.HTTP_DOI_RESOLVER_URL);
+                index1 = (identifierString.startsWith(DOIServiceBean.DOI_RESOLVER_URL)) ? DOIServiceBean.DOI_RESOLVER_URL.length() - 1 : DOIServiceBean.HTTP_DOI_RESOLVER_URL.length() - 1;
+                index2 = identifierString.indexOf("/", index1 + 1);
+            } else if (identifierString.startsWith(PermaLinkPidProviderServiceBean.PERMA_RESOLVER_URL + Dataset.TARGET_URL)) {
+                protocol = PermaLinkPidProviderServiceBean.PERMA_PROTOCOL;
+                index1 = PermaLinkPidProviderServiceBean.PERMA_RESOLVER_URL.length() + Dataset.TARGET_URL.length() - 1;
                 index2 = identifierString.indexOf("/", index1 + 1);
             } else {
                 logger.warning("HTTP Url in supplied as the identifier is neither a Handle nor DOI resolver: "+identifierString);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
index f914bd363f2..c17ba909230 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java
@@ -36,12 +36,12 @@
 import edu.harvard.iq.dataverse.util.ConstraintViolationUtil;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
 import edu.harvard.iq.dataverse.util.json.JsonParser;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.io.StringReader;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Date;
@@ -51,23 +51,22 @@
 import java.util.logging.Level;
 import java.util.logging.LogRecord;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 import javax.xml.stream.XMLStreamException;
 import org.apache.commons.lang3.StringUtils;
 
@@ -259,12 +258,11 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve
                 throw new ImportException("Failed to transform XML metadata format "+metadataFormat+" into a DatasetDTO");
             }
         }
-        
-        JsonReader jsonReader = Json.createReader(new StringReader(json));
-        JsonObject obj = jsonReader.readObject();
+
+        JsonObject obj = JsonUtil.getJsonObject(json);
         //and call parse Json to read it into a dataset   
         try {
-            JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService);
+            JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, harvestingClient);
             parser.setLenient(true);
             Dataset ds = parser.parseDataset(obj);
 
@@ -325,26 +323,26 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve
             
             // A Global ID is required, in order for us to be able to harvest and import
             // this dataset:
-            if (StringUtils.isEmpty(ds.getGlobalIdString())) {
+            if (ds.getGlobalId() == null || StringUtils.isEmpty(ds.getGlobalId().asString())) {
                 throw new ImportException("The harvested metadata record with the OAI server identifier "+harvestIdentifier+" does not contain a global unique identifier that we could recognize, skipping.");
             }
 
             ds.setHarvestedFrom(harvestingClient);
             ds.setHarvestIdentifier(harvestIdentifier);
             
-            Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalIdString());
+            Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId().asString());
 
             if (existingDs != null) {
                 // If this dataset already exists IN ANOTHER DATAVERSE
                 // we are just going to skip it!
                 if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
-                    throw new ImportException("The dataset with the global id "+ds.getGlobalIdString()+" already exists, in the dataverse "+existingDs.getOwner().getAlias()+", skipping.");
+                    throw new ImportException("The dataset with the global id "+ds.getGlobalId().asString()+" already exists, in the dataverse "+existingDs.getOwner().getAlias()+", skipping.");
                 }
                 // And if we already have a dataset with this same id, in this same
                 // dataverse, but it is  LOCAL dataset (can happen!), we're going to 
                 // skip it also: 
                 if (!existingDs.isHarvested()) {
-                    throw new ImportException("A LOCAL dataset with the global id "+ds.getGlobalIdString()+" already exists in this dataverse; skipping.");
+                    throw new ImportException("A LOCAL dataset with the global id "+ds.getGlobalId().asString()+" already exists in this dataverse; skipping.");
                 }
                 // For harvested datasets, there should always only be one version.
                 // We will replace the current version with the imported version.
@@ -396,10 +394,8 @@ public JsonObject ddiToJson(String xmlToParse) throws ImportException, XMLStream
         // convert DTO to Json,
         Gson gson = new GsonBuilder().setPrettyPrinting().create();
         String json = gson.toJson(dsDTO);
-        JsonReader jsonReader = Json.createReader(new StringReader(json));
-        JsonObject obj = jsonReader.readObject();
 
-        return obj;
+        return JsonUtil.getJsonObject(json);
     }
     
     public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
@@ -416,8 +412,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o
         // convert DTO to Json, 
         Gson gson = new GsonBuilder().setPrettyPrinting().create();
         String json = gson.toJson(dsDTO);
-        JsonReader jsonReader = Json.createReader(new StringReader(json));
-        JsonObject obj = jsonReader.readObject();
+        JsonObject obj = JsonUtil.getJsonObject(json);
         //and call parse Json to read it into a dataset   
         try {
             JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService);
@@ -427,8 +422,8 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o
             // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol
             // we support, it will be rejected.
             if (importType.equals(ImportType.NEW)) {
-                if (ds.getGlobalIdString() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
-                    throw new ImportException("Could not register id " + ds.getGlobalIdString() + ", protocol not supported");
+                if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
+                    throw new ImportException("Could not register id " + ds.getGlobalId().asString() + ", protocol not supported");
                 }
             }
 
@@ -497,7 +492,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o
             }
 
 
-            Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalIdString());
+            Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId().asString());
 
             if (existingDs != null) {
                 if (importType.equals(ImportType.HARVEST)) {
@@ -516,11 +511,11 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o
                     // check that the version number isn't already in the dataset
                     for (DatasetVersion dsv : existingDs.getVersions()) {
                         if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
-                            throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalIdString());
+                            throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId().asString());
                         }
                     }
                     DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
-                    status = " created datasetVersion, for dataset "+ dsv.getDataset().getGlobalIdString();
+                    status = " created datasetVersion, for dataset "+ dsv.getDataset().getGlobalId().asString();
                     createdId = dsv.getId();
                 }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java
index aef17d1ab34..71a010b7e6d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilder.java
@@ -2,14 +2,14 @@
 
 import edu.harvard.iq.dataverse.api.ApiBlockingFilter;
 
-import javax.json.Json;
-import javax.json.JsonValue;
-import javax.json.JsonObjectBuilder;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java
index 15d1cb07a11..a2cf3082ae7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthFilter.java
@@ -8,14 +8,14 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.logging.SimpleFormatter;
-import javax.inject.Inject;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.inject.Inject;
+import jakarta.servlet.Filter;
+import jakarta.servlet.FilterChain;
+import jakarta.servlet.FilterConfig;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.ServletRequest;
+import jakarta.servlet.ServletResponse;
+import jakarta.servlet.http.HttpServletRequest;
 
 public class AuthFilter implements Filter {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java
index 3715900733c..9cee3ec67c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthTestDataServiceBean.java
@@ -15,7 +15,7 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
+import jakarta.ejb.Stateless;
 import org.apache.commons.lang3.StringUtils;
 
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java
index 94a773bc977..3291dd2efbf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserLookup.java
@@ -2,17 +2,17 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 /**
  * A somewhat glorified key-value pair, persisted in the database.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java
index 6289865baf0..fbad14645bc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationProvidersRegistrationServiceBean.java
@@ -17,20 +17,21 @@
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Lock;
-import static javax.ejb.LockType.READ;
-import static javax.ejb.LockType.WRITE;
-import javax.ejb.Singleton;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Lock;
+import static jakarta.ejb.LockType.READ;
+import static jakarta.ejb.LockType.WRITE;
+import jakarta.ejb.Singleton;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
@@ -121,6 +122,15 @@ public void startup() {
                         logger.log(Level.SEVERE, "Exception setting up the authentication provider '" + row.getId() + "': " + ex.getMessage(), ex);
                     }
         });
+        
+        // Add providers registered via MPCONFIG
+        if (JvmSettings.OIDC_ENABLED.lookupOptional(Boolean.class).orElse(false)) {
+            try {
+                registerProvider(OIDCAuthenticationProviderFactory.buildFromSettings());
+            } catch (AuthorizationSetupException e) {
+                logger.log(Level.SEVERE, "Exception setting up an OIDC auth provider via MicroProfile Config", e);
+            }
+        }
     }
 
     private void registerProviderFactory(AuthenticationProviderFactory aFactory) 
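
The block added above only registers the MPCONFIG-based OIDC provider when the corresponding JVM setting resolves to true. As a rough illustration of what JvmSettings.OIDC_ENABLED.lookupOptional(Boolean.class) boils down to, here is a minimal MicroProfile Config sketch; the property name is an assumption inferred from the setting's constant name and is not taken from this diff.

// Minimal sketch only: a direct MicroProfile Config lookup approximating what
// JvmSettings.OIDC_ENABLED.lookupOptional(Boolean.class) does internally.
// The property name "dataverse.auth.oidc.enabled" is an assumption, not verified here.
import org.eclipse.microprofile.config.ConfigProvider;

public class OidcToggleSketch {
    public static void main(String[] args) {
        boolean oidcEnabled = ConfigProvider.getConfig()
                .getOptionalValue("dataverse.auth.oidc.enabled", Boolean.class)
                .orElse(false);
        System.out.println(oidcEnabled ? "would register OIDC provider" : "skipping OIDC provider");
    }
}
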
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
index 9bf53116efa..1c0f5010059 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
@@ -6,33 +6,29 @@
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
 import edu.harvard.iq.dataverse.UserServiceBean;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
 import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
-import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationProviderFactoryNotFoundException;
-import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
 import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory;
-import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailData;
 import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
-import edu.harvard.iq.dataverse.engine.command.impl.RevokeAllRolesCommand;
 import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
 import edu.harvard.iq.dataverse.passwordreset.PasswordResetServiceBean;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
@@ -44,7 +40,6 @@
 import java.util.Calendar;
 import java.util.Collection;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -53,21 +48,21 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.persistence.TypedQuery;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 
 /**
  * AuthenticationService is for general authentication-related operations.
@@ -127,6 +122,9 @@ public class AuthenticationServiceBean {
     @EJB
     SavedSearchServiceBean savedSearchService;
 
+    @EJB
+    PrivateUrlServiceBean privateUrlService;
+ 
     @PersistenceContext(unitName = "VDCNet-ejbPU")
     private EntityManager em;
         
@@ -589,7 +587,7 @@ public boolean updateProvider( AuthenticatedUser authenticatedUser, String authe
      * {@code userDisplayInfo}, a lookup entry for them based
      * UserIdentifier.getLookupStringPerAuthProvider (within the supplied
      * authentication provider), and internal user identifier (used for role
-     * assignments, etc.) based on UserIdentifier.getInternalUserIdentifer.
+     * assignments, etc.) based on UserIdentifier.getInternalUserIdentifier.
      *
      * @param userRecordId
      * @param proposedAuthenticatedUserIdentifier
@@ -614,20 +612,21 @@ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecord
             proposedAuthenticatedUserIdentifier = proposedAuthenticatedUserIdentifier.trim();
         }
         // we now select a username for the generated AuthenticatedUser, or give up
-        String internalUserIdentifer = proposedAuthenticatedUserIdentifier;
+        String internalUserIdentifier = proposedAuthenticatedUserIdentifier;
         // TODO should lock table authenticated users for write here
-        if ( identifierExists(internalUserIdentifer) ) {
+        if ( identifierExists(internalUserIdentifier) ) {
             if ( ! generateUniqueIdentifier ) {
                 return null;
             }
             int i=1;
-            String identifier = internalUserIdentifer + i;
+            String identifier = internalUserIdentifier + i;
             while ( identifierExists(identifier) ) {
                 i += 1;
+                identifier = internalUserIdentifier + i;
             }
             authenticatedUser.setUserIdentifier(identifier);
         } else {
-            authenticatedUser.setUserIdentifier(internalUserIdentifer);
+            authenticatedUser.setUserIdentifier(internalUserIdentifier);
         }
         authenticatedUser = save( authenticatedUser );
         // TODO should unlock table authenticated users for write here
@@ -940,14 +939,45 @@ public List <WorkflowComment> getWorkflowCommentsByAuthenticatedUser(Authenticat
         return query.getResultList();
     }
 
-    public ApiToken getValidApiTokenForUser(AuthenticatedUser user) {
+    /**
+     * This method gets a valid API token for an AuthenticatedUser, creating a new
+     * token if one doesn't exist or if the token is expired.
+     * 
+     * @param user
+     * @return
+     */
+    public ApiToken getValidApiTokenForAuthenticatedUser(AuthenticatedUser user) {
         ApiToken apiToken = null;
         apiToken = findApiTokenByUser(user);
-        if ((apiToken == null) || (apiToken.getExpireTime().before(new Date()))) {
+        if ((apiToken == null) || apiToken.isExpired()) {
             logger.fine("Created apiToken for user: " + user.getIdentifier());
             apiToken = generateApiTokenForUser(user);
         }
         return apiToken;
     }
 
+    /**
+     *  Gets a token for an AuthenticatedUser or a PrivateUrlUser. For an AuthenticatedUser it will create a
+     *  new token if needed. Note that, for a PrivateUrlUser, this method creates a token
+     *  with a temporary AuthenticatedUser that only has a userIdentifier - needed when generating signed URLs.
+     * @param user
+     * @return a token or null (i.e. if the user is not an AuthenticatedUser or PrivateUrlUser)
+     */
+
+    public ApiToken getValidApiTokenForUser(User user) {
+        ApiToken apiToken = null;
+        if (user instanceof AuthenticatedUser) {
+            apiToken = getValidApiTokenForAuthenticatedUser((AuthenticatedUser) user);
+        } else if (user instanceof PrivateUrlUser) {
+            PrivateUrlUser privateUrlUser = (PrivateUrlUser) user;
+            
+            PrivateUrl privateUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId());
+            apiToken = new ApiToken();
+            apiToken.setTokenString(privateUrl.getToken());
+            AuthenticatedUser au = new AuthenticatedUser();
+            au.setUserIdentifier(privateUrlUser.getIdentifier());
+            apiToken.setAuthenticatedUser(au);
+        }
+        return apiToken;
+    }
 }
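
The new getValidApiTokenForUser(User) overload above accepts either an AuthenticatedUser or a PrivateUrlUser and returns null for anything else. A minimal usage sketch, assuming a caller that only needs the token string for signed URLs and that ApiToken exposes getTokenString(); the helper class and its name are illustrative, not part of this change.

import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.User;
import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;

@Stateless
public class SignedUrlTokenHelper {

    @EJB
    AuthenticationServiceBean authSvc;

    // Returns a token string usable for signed URLs, or null if the user is
    // neither an AuthenticatedUser nor a PrivateUrlUser.
    public String tokenStringFor(User user) {
        ApiToken token = authSvc.getValidApiTokenForUser(user);
        return (token == null) ? null : token.getTokenString();
    }
}
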
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
index 12ddf817221..ff1a5546f38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRole.java
@@ -11,19 +11,19 @@
 import java.util.MissingResourceException;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
 
 /**
  * A role is an annotated set of permissions. A role belongs
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java
index 4e6b54a8d49..966247bce2e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/DataverseRolePermissionHelper.java
@@ -4,21 +4,17 @@
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
-import edu.harvard.iq.dataverse.authorization.DataverseRole;
-import java.sql.Array;
-import java.util.AbstractMap;
+
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 import org.apache.commons.lang3.StringUtils;
 
 /*
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java
index 1ac2c7583d6..312910e52c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserIdentifier.java
@@ -25,18 +25,31 @@ public class UserIdentifier {
     /**
      * The String used in the permission system to assign roles, for example.
      */
-    String internalUserIdentifer;
+    String internalUserIdentifier;
 
-    public UserIdentifier(String lookupStringPerAuthProvider, String internalUserIdentifer) {
+    public UserIdentifier(String lookupStringPerAuthProvider, String internalUserIdentifier) {
         this.lookupStringPerAuthProvider = lookupStringPerAuthProvider;
-        this.internalUserIdentifer = internalUserIdentifer;
+        this.internalUserIdentifier = internalUserIdentifier;
     }
 
     public String getLookupStringPerAuthProvider() {
         return lookupStringPerAuthProvider;
     }
 
+    /**
+     * @deprecated because of a typo; use {@link #getInternalUserIdentifier()} instead
+     * @see #getInternalUserIdentifier()
+     * @return the internal user identifier
+     */
+    @Deprecated
     public String getInternalUserIdentifer() {
-        return internalUserIdentifer;
+        return getInternalUserIdentifier();
+    }
+
+    /**
+     * @return the internal user identifier
+     */
+    public String getInternalUserIdentifier() {
+        return internalUserIdentifier;
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java
index 963ee592bbf..dfbb43fae46 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/UserRecordIdentifier.java
@@ -2,6 +2,8 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 
+import java.util.Objects;
+
 /**
  * Identifies a user using two strings:
  * <ul>
@@ -38,4 +40,16 @@ public AuthenticatedUserLookup createAuthenticatedUserLookup( AuthenticatedUser
         return new AuthenticatedUserLookup(userIdInRepo, repoId, u);
     }
     
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (!(o instanceof UserRecordIdentifier)) return false;
+        UserRecordIdentifier that = (UserRecordIdentifier) o;
+        return Objects.equals(repoId, that.repoId) && Objects.equals(getUserIdInRepo(), that.getUserIdInRepo());
+    }
+    
+    @Override
+    public int hashCode() {
+        return Objects.hash(repoId, getUserIdInRepo());
+    }
 }
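
With equals() and hashCode() defined over repoId and the user id in the repo, UserRecordIdentifier instances compare by value and can safely be used as HashSet members or HashMap keys. A small sketch, assuming the constructor takes the provider id followed by the persistent user id (the argument order is inferred from the field names, and the values are made up):

import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
import java.util.HashSet;
import java.util.Set;

public class UserRecordIdentifierSketch {
    public static void main(String[] args) {
        UserRecordIdentifier a = new UserRecordIdentifier("oidc-mpconfig", "subject-123");
        UserRecordIdentifier b = new UserRecordIdentifier("oidc-mpconfig", "subject-123");

        Set<UserRecordIdentifier> seen = new HashSet<>();
        seen.add(a);

        // Both lines print true: the identifiers are equal by value, not by identity.
        System.out.println(a.equals(b));
        System.out.println(seen.contains(b));
    }
}
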
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
index 98fe3ad18c3..a746eee0a60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java
@@ -7,10 +7,13 @@
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupProvider;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
+import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup;
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider;
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupsServiceBean;
+import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup;
 import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroupProvider;
 import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroupServiceBean;
+import edu.harvard.iq.dataverse.authorization.groups.impl.shib.ShibGroup;
 import edu.harvard.iq.dataverse.authorization.groups.impl.shib.ShibGroupProvider;
 import edu.harvard.iq.dataverse.authorization.groups.impl.shib.ShibGroupServiceBean;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -23,11 +26,11 @@
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.toSet;
 import java.util.stream.Stream;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
@@ -97,9 +100,49 @@ public MailDomainGroupProvider getMailDomainGroupProvider() {
      * @return The groups {@code req} is part of under {@code dvo}.
      */
     public Set<Group> groupsFor( DataverseRequest req, DvObject dvo ) {
-        return groupProviders.values().stream()
+        Set<Group> ret = groupProviders.values().stream()
                               .flatMap(gp->(Stream<Group>)gp.groupsFor(req, dvo).stream())
                               .collect(toSet());
+        
+        // ShibGroupProvider.groupsFor(), above, only returns the Shib Groups 
+        // (as you would expect), but not the Explicit Groups that may include them 
+        // (unlike the ExplicitGroupProvider, which returns all the ancestors too). 
+        // We appear to rely on this method returning all of the ancestor groups 
+        // for everything, so we need to perform some extra hacky steps in 
+        // order to obtain the ancestors for the shib groups as well:
+        
+        Set<ExplicitGroup> directAncestorsOfShibGroups = new HashSet<>();
+        for (Group group : ret) {
+
+            if (group instanceof ShibGroup 
+                    || group instanceof IpGroup 
+                    || group instanceof MailDomainGroup) {
+                // if this is one of the non-explicit group types above, we 
+                // need to find if it is included in some explicit group; i.e., 
+                // if it has direct ancestors that happen to be explicit groups:
+                
+                directAncestorsOfShibGroups.addAll(explicitGroupService.findDirectlyContainingGroups(group));
+            }
+        }
+        
+        if (!directAncestorsOfShibGroups.isEmpty()) {
+            // ... and now we can run the Monster Query in the ExplicitGroupServiceBean
+            // that will find ALL the hierarchical explicit group ancestors of 
+            // these groups that include the shib groups found above
+            
+            Set<ExplicitGroup> allAncestorsOfShibGroups = explicitGroupService.findClosure(directAncestorsOfShibGroups);
+            
+            if (allAncestorsOfShibGroups != null) {
+                ret.addAll(allAncestorsOfShibGroups);
+            }
+        }
+        
+        // Perhaps the code above should be moved into the ShibGroupProvider (??)
+        // Also, this most likely applies not just to ShibGroups, but to 
+        // all the groups that are not ExplicitGroups, i.e., IP- and domain-based 
+        // groups too. (??)
+        
+        return ret;
     }
     
     /**
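
The ancestor lookup added above is essentially a transitive-closure computation: start from the groups the request is directly in, repeatedly ask which explicit groups directly contain them, and stop when nothing new turns up. A generic sketch of that fixed-point loop, with the lookup function standing in for ExplicitGroupServiceBean.findDirectlyContainingGroups() / findClosure():

import java.util.HashSet;
import java.util.Set;
import java.util.function.Function;

public final class GroupClosureSketch {

    // Computes the transitive closure of "directly contained in" starting from seed.
    public static <G> Set<G> closure(Set<G> seed, Function<G, Set<G>> directlyContainingGroupsOf) {
        Set<G> result = new HashSet<>(seed);
        Set<G> frontier = new HashSet<>(seed);
        while (!frontier.isEmpty()) {
            Set<G> next = new HashSet<>();
            for (G group : frontier) {
                for (G container : directlyContainingGroupsOf.apply(group)) {
                    if (result.add(container)) {
                        next.add(container); // only revisit groups we have not seen before
                    }
                }
            }
            frontier = next;
        }
        return result;
    }
}
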
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java
index 52785d5c7e2..1ef3b01d752 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/PersistedGlobalGroup.java
@@ -3,14 +3,14 @@
 import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * Convenience base class for implementing groups that apply to the entire Dataverse
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java
index 93de4480e55..2723561d8b4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroup.java
@@ -13,24 +13,24 @@
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeSet;
-import javax.persistence.Column;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.JoinTable;
-import javax.persistence.ManyToMany;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.PostLoad;
-import javax.persistence.PrePersist;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.validation.constraints.Pattern;
+import jakarta.persistence.Column;
+import jakarta.persistence.ElementCollection;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.JoinTable;
+import jakarta.persistence.ManyToMany;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.PostLoad;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.Pattern;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
@@ -61,7 +61,7 @@
     @NamedQuery( name="ExplicitGroup.findByAuthenticatedUserIdentifier",
                  query="SELECT eg FROM ExplicitGroup eg JOIN eg.containedAuthenticatedUsers au "
                      + "WHERE au.userIdentifier=:authenticatedUserIdentifier"),
-    @NamedQuery( name="ExplicitGroup.findByRoleAssgineeIdentifier",
+    @NamedQuery( name="ExplicitGroup.findByRoleAssigneeIdentifier",
                  query="SELECT eg FROM ExplicitGroup eg JOIN eg.containedRoleAssignees cra "
                      + "WHERE cra=:roleAssigneeIdentifier"),
     @NamedQuery( name="ExplicitGroup.findByContainedExplicitGroupId",
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java
index de9b9ba530d..a688fac0e34 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java
@@ -10,17 +10,16 @@
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.joining;
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * A bean providing the {@link ExplicitGroupProvider}s with container services,
@@ -169,7 +168,7 @@ public Set<ExplicitGroup> findDirectlyContainingGroups( RoleAssignee ra ) {
         } else {
             return provider.updateProvider(
                     new HashSet<>(
-                            em.createNamedQuery("ExplicitGroup.findByRoleAssgineeIdentifier", ExplicitGroup.class)
+                            em.createNamedQuery("ExplicitGroup.findByRoleAssigneeIdentifier", ExplicitGroup.class)
                               .setParameter("roleAssigneeIdentifier", ra.getIdentifier())
                               .getResultList()
                   ));
@@ -198,7 +197,7 @@ public Set<ExplicitGroup> findGroups( RoleAssignee ra, DvObject o ) {
                 .filter( g -> g.owner.isAncestorOf(o) )
                 .collect( Collectors.toSet() );
     }
-    
+        
     /**
      * Finds all the groups {@code ra} directly belongs to in the context of {@code o}. In effect,
      * collects all the groups {@code ra} belongs to and that are defined at {@code o}
@@ -252,7 +251,7 @@ public Set<ExplicitGroup> findDirectGroups( RoleAssignee ra, DvObject o ) {
      * @param seed the initial set of groups.
      * @return Transitive closure (based on group  containment) of the groups in {@code seed}.
      */
-    protected Set<ExplicitGroup> findClosure( Set<ExplicitGroup> seed ) {
+    public Set<ExplicitGroup> findClosure( Set<ExplicitGroup> seed ) {
         
         if ( seed.isEmpty() ) return Collections.emptySet();
         
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java
index a3231557898..038fbbfc6e0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroup.java
@@ -12,12 +12,12 @@
 import java.util.HashSet;
 import java.util.Objects;
 import java.util.Set;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.Transient;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Transient;
 
 @NamedQueries({
     @NamedQuery(name="IpGroup.findAll",
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java
index c03cf26e11e..15282045b3a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java
@@ -10,12 +10,12 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * Provides CRUD tools to efficiently manage IP groups in a Java EE container.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
index 3ecd7689e1c..8694b7d455b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4Range.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.math.BigInteger;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * A range of IPv4 addresses. In order to make SQL querying efficient, the actual fields
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java
index d1301d550c7..379c64a88cf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6Range.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java
index b71dbcd0eba..fc21397898f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRange.java
@@ -2,8 +2,8 @@
 
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup;
 import java.util.Objects;
-import javax.persistence.ManyToOne;
-import javax.persistence.MappedSuperclass;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.MappedSuperclass;
 
 /**
  * A range of {@link IpAddress}es. Abstract class - to instantiate, you need to
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java
index def11c57076..15b2fd1810c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroup.java
@@ -6,11 +6,11 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotEmpty;
+import jakarta.persistence.Entity;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotEmpty;
 ;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java
index 58e72b7b575..b1b1e883705 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBean.java
@@ -9,14 +9,14 @@
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.ejb.*;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.ws.rs.NotFoundException;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.*;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.ws.rs.NotFoundException;
 
 /**
  * A bean providing the {@link MailDomainGroupProvider}s with container services, such as database connectivity.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java
index 3beb8dadedb..30850f0fb20 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroup.java
@@ -1,17 +1,16 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.shib;
 
-import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
 import edu.harvard.iq.dataverse.authorization.groups.Group;
 import edu.harvard.iq.dataverse.authorization.groups.GroupProvider;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Transient;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Transient;
 
 /**
  * Persistence for Shibboleth groups.
@@ -135,5 +134,4 @@ public RoleAssigneeDisplayInfo getDisplayInfo() {
     public boolean contains(DataverseRequest aRequest) {
         throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
     }
-
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java
index c15e56ee7e0..7a7844b7c1e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java
@@ -11,14 +11,14 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  * @todo Consider merging this bean into the newer and more generic
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java
index 6b9c545b7f9..2f37c777877 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/AuthenticationProviderRow.java
@@ -2,13 +2,13 @@
 
 import edu.harvard.iq.dataverse.authorization.AuthenticationProvider;
 import java.util.Objects;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.Lob;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.Lob;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
 
 /**
  * Database-storable form of an {@code AuthenticationProvider}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
index c2510b8b043..2ce36997ea9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUser.java
@@ -5,20 +5,20 @@
 import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotBlank;
-import javax.validation.constraints.Size;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.Size;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java
index c39c7cb2985..ffbc5d7a027 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinUserServiceBean.java
@@ -9,17 +9,17 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
 
 /**
  *
@@ -88,7 +88,7 @@ public BuiltinUser findByUserName(String userName) {
             return em.createNamedQuery("BuiltinUser.findByUserName", BuiltinUser.class)
                     .setParameter("userName", userName)
                     .getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (NoResultException e) {
             return null;
         } catch (NonUniqueResultException ex) {
             logger.log(Level.WARNING, "multiple accounts found for username {0}", userName);
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
index 5c0f3a49f76..a0e3f899443 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPage.java
@@ -51,15 +51,15 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.event.ActionEvent;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.event.ActionEvent;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.validator.constraints.NotBlank;
@@ -488,6 +488,7 @@ public void displayNotification() {
                     break;
 
                 case REQUESTFILEACCESS:
+                case REQUESTEDFILEACCESS:
                     DataFile file = fileService.find(userNotification.getObjectId());
                     if (file != null) {
                         userNotification.setTheObject(file.getOwner());
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java
index 01139cd2e27..7fd7bf3e885 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/AbstractOAuth2AuthenticationProvider.java
@@ -14,7 +14,7 @@
 import edu.harvard.iq.dataverse.authorization.AuthenticationProviderDisplayInfo;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.validation.constraints.NotNull;
+import jakarta.validation.constraints.NotNull;
 import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.ExecutionException;
@@ -139,6 +139,7 @@ public OAuth20Service getService(String callbackUrl) {
      * Receive user data from OAuth2 provider after authn/z has been successfull. (Callback view uses this)
      * Request a token and access the resource, parse output and return user details.
      * @param code The authz code sent from the provider
+     * @param state The state which was communicated between us and the provider, identifying the exact request
      * @param redirectUrl The redirect URL (some providers require this when fetching the access token, e. g. Google)
      * @return A user record containing all user details accessible for us
      * @throws IOException Thrown when communication with the provider fails
@@ -146,7 +147,7 @@ public OAuth20Service getService(String callbackUrl) {
      * @throws InterruptedException Thrown when the requests thread is failing
      * @throws ExecutionException Thrown when the requests thread is failing
      */
-    public OAuth2UserRecord getUserRecord(String code, String redirectUrl)
+    public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl)
         throws IOException, OAuth2Exception, InterruptedException, ExecutionException {
         
         OAuth20Service service = getService(redirectUrl);
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java
index 54ba3ec6a05..821e8a5ea6c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2FirstLoginPage.java
@@ -30,14 +30,14 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.inject.Inject;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java
index c5be41a014a..0fd0852b4df 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBean.java
@@ -20,12 +20,12 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.toList;
-import javax.ejb.EJB;
-import javax.inject.Named;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.constraints.NotNull;
+import jakarta.ejb.EJB;
+import jakarta.inject.Named;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.constraints.NotNull;
 
 import static edu.harvard.iq.dataverse.util.StringUtil.toOption;
 import edu.harvard.iq.dataverse.util.SystemConfig;
@@ -100,7 +100,7 @@ public void exchangeCodeForToken() throws IOException {
 
             if (oIdp.isPresent() && code.isPresent()) {
                 AbstractOAuth2AuthenticationProvider idp = oIdp.get();
-                oauthUser = idp.getUserRecord(code.get(), systemConfig.getOAuth2CallbackUrl());
+                oauthUser = idp.getUserRecord(code.get(), req.getParameter("state"), systemConfig.getOAuth2CallbackUrl());
                 
                 // Throw an error if this authentication method is disabled:
                 // (it's not clear if it's possible at all, for somebody to get here with 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java
index a5ee5ddf537..59f659ff297 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenData.java
@@ -4,14 +4,14 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.io.Serializable;
 import java.sql.Timestamp;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
 
 /**
  * Token data for a given user, received from an OAuth2 system. Contains the 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java
index d8f1fa7600b..b1dcb6df8cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2TokenDataServiceBean.java
@@ -2,10 +2,10 @@
 
 import java.util.List;
 import java.util.Optional;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  * CRUD for {@link OAuth2TokenData}.
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java
index 62f3cc382e2..8829a25336b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2AP.java
@@ -9,9 +9,9 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.StringReader;
 import java.util.Collections;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 
 /**
  * IDP adaptor for GitHub.com
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java
index 1fa5470d551..a864ecb810a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java
@@ -8,9 +8,9 @@
 import java.io.StringReader;
 import java.util.Arrays;
 import java.util.UUID;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java
index da260a9fb0e..bd3caccc220 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/MicrosoftOAuth2AP.java
@@ -8,9 +8,9 @@
 import java.util.Collections;
 import java.util.logging.Logger;
 import java.io.StringReader;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java
index 02177ee0032..089ca40e164 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/OrcidOAuth2AP.java
@@ -23,10 +23,10 @@
 import static java.util.stream.Collectors.joining;
 import java.util.stream.IntStream;
 import java.util.stream.Stream;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.validation.constraints.NotNull;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.validation.constraints.NotNull;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
index a9c44010950..5eb2b391eb7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthProvider.java
@@ -1,5 +1,7 @@
 package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc;
 
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
 import com.github.scribejava.core.builder.api.DefaultApi20;
 import com.nimbusds.oauth2.sdk.AuthorizationCode;
 import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant;
@@ -18,6 +20,8 @@
 import com.nimbusds.oauth2.sdk.id.ClientID;
 import com.nimbusds.oauth2.sdk.id.Issuer;
 import com.nimbusds.oauth2.sdk.id.State;
+import com.nimbusds.oauth2.sdk.pkce.CodeChallengeMethod;
+import com.nimbusds.oauth2.sdk.pkce.CodeVerifier;
 import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
 import com.nimbusds.openid.connect.sdk.AuthenticationRequest;
 import com.nimbusds.openid.connect.sdk.Nonce;
@@ -29,18 +33,25 @@
 import com.nimbusds.openid.connect.sdk.op.OIDCProviderConfigurationRequest;
 import com.nimbusds.openid.connect.sdk.op.OIDCProviderMetadata;
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
+import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
 import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 import java.io.IOException;
 import java.net.URI;
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
+import java.util.logging.Level;
 import java.util.logging.Logger;
 
 /**
@@ -54,15 +65,33 @@ public class OIDCAuthProvider extends AbstractOAuth2AuthenticationProvider {
     protected String title = "Open ID Connect";
     protected List<String> scope = Arrays.asList("openid", "email", "profile");
     
-    Issuer issuer;
-    ClientAuthentication clientAuth;
-    OIDCProviderMetadata idpMetadata;
+    final Issuer issuer;
+    final ClientAuthentication clientAuth;
+    final OIDCProviderMetadata idpMetadata;
+    final boolean pkceEnabled;
+    final CodeChallengeMethod pkceMethod;
     
-    public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL) throws AuthorizationSetupException {
+    /**
+     * Using PKCE, we create and send a special {@link CodeVerifier}. This contains a secret
+     * we need again when verifying the response by the provider, thus the cache.
+     * To make sure this cannot be abused to DDoS us and that unused verifiers do not linger,
+     * use an evicting cache implementation and not a standard map.
+     */
+    private final Cache<String,CodeVerifier> verifierCache = Caffeine.newBuilder()
+        .maximumSize(JvmSettings.OIDC_PKCE_CACHE_MAXSIZE.lookup(Integer.class))
+        .expireAfterWrite(Duration.of(JvmSettings.OIDC_PKCE_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.SECONDS))
+        .build();
+    
+    public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEndpointURL,
+                            boolean pkceEnabled, String pkceMethod) throws AuthorizationSetupException {
         this.clientSecret = aClientSecret; // needed for state creation
         this.clientAuth = new ClientSecretBasic(new ClientID(aClientId), new Secret(aClientSecret));
         this.issuer = new Issuer(issuerEndpointURL);
-        getMetadata();
+        
+        this.idpMetadata = getMetadata();
+        
+        this.pkceEnabled = pkceEnabled;
+        this.pkceMethod = CodeChallengeMethod.parse(pkceMethod);
     }
     
     /**
@@ -74,7 +103,9 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd
      * @return false
      */
     @Override
-    public boolean isDisplayIdentifier() { return false; }
+    public boolean isDisplayIdentifier() {
+        return false;
+    }
     
     /**
      * Setup metadata from OIDC provider during creation of the provider representation
@@ -82,9 +113,14 @@ public OIDCAuthProvider(String aClientId, String aClientSecret, String issuerEnd
      * @throws IOException when sth. goes wrong with the retrieval
      * @throws ParseException when the metadata is not parsable
      */
-    void getMetadata() throws AuthorizationSetupException {
+    OIDCProviderMetadata getMetadata() throws AuthorizationSetupException {
         try {
-            this.idpMetadata = getMetadata(this.issuer);
+            var metadata = getMetadata(this.issuer);
+            // Assert that the provider supports the code flow
+            if (metadata.getResponseTypes().stream().noneMatch(ResponseType::impliesCodeFlow)) {
+                throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling.");
+            }
+            return metadata;
         } catch (IOException ex) {
             logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not retrievable: "+ex.getMessage());
             throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not retrievable.");
@@ -92,11 +128,6 @@ void getMetadata() throws AuthorizationSetupException {
             logger.severe("OIDC provider metadata at \"+issuerEndpointURL+\" not parsable: "+ex.getMessage());
             throw new AuthorizationSetupException("OIDC provider metadata at "+this.issuer.getValue()+" not parsable.");
         }
-    
-        // Assert that the provider supports the code flow
-        if (! this.idpMetadata.getResponseTypes().stream().filter(idp -> idp.impliesCodeFlow()).findAny().isPresent()) {
-            throw new AuthorizationSetupException("OIDC provider at "+this.issuer.getValue()+" does not support code flow, disabling.");
-        }
     }
     
     /**
@@ -145,6 +176,7 @@ public String buildAuthzUrl(String state, String callbackUrl) {
         State stateObject = new State(state);
         URI callback = URI.create(callbackUrl);
         Nonce nonce = new Nonce();
+        CodeVerifier pkceVerifier = pkceEnabled ? new CodeVerifier() : null;
         
         AuthenticationRequest req = new AuthenticationRequest.Builder(new ResponseType("code"),
                                                                       Scope.parse(this.scope),
@@ -152,9 +184,17 @@ public String buildAuthzUrl(String state, String callbackUrl) {
                                                                       callback)
             .endpointURI(idpMetadata.getAuthorizationEndpointURI())
             .state(stateObject)
+            // Called method is nullsafe - will disable sending a PKCE challenge in case the verifier is not present
+            .codeChallenge(pkceVerifier, pkceMethod)
             .nonce(nonce)
             .build();
         
+        // Cache the PKCE verifier, as we need the secret in it for verification later again, after the client sends us
+        // the auth code! We use the state to cache the verifier, as the state is unique per authentication event.
+        if (pkceVerifier != null) {
+            this.verifierCache.put(state, pkceVerifier);
+        }
+        
         return req.toURI().toString();
     }
     
@@ -170,10 +210,14 @@ public String buildAuthzUrl(String state, String callbackUrl) {
      * @throws ExecutionException Thrown when the requests thread is failing
      */
     @Override
-    public OAuth2UserRecord getUserRecord(String code, String redirectUrl)
-        throws IOException, OAuth2Exception, InterruptedException, ExecutionException {
-        // Create grant object
-        AuthorizationGrant codeGrant = new AuthorizationCodeGrant(new AuthorizationCode(code), URI.create(redirectUrl));
+    public OAuth2UserRecord getUserRecord(String code, String state, String redirectUrl) throws IOException, OAuth2Exception {
+        // Retrieve the verifier from the cache; unused entries expire on their own. If not found, will be null.
+        // Will be sent to token endpoint for verification, so if required but missing, will lead to exception.
+        CodeVerifier verifier = verifierCache.getIfPresent(state);
+        
+        // Create grant object - again, this is null-safe for the verifier
+        AuthorizationGrant codeGrant = new AuthorizationCodeGrant(
+            new AuthorizationCode(code), URI.create(redirectUrl), verifier);
     
         // Get Access Token first
         Optional<BearerAccessToken> accessToken = getAccessToken(codeGrant);
@@ -272,4 +316,44 @@ Optional<UserInfo> getUserInfo(BearerAccessToken accessToken) throws IOException
             throw new OAuth2Exception(-1, ex.getMessage(), BundleUtil.getStringFromBundle("auth.providers.exception.userinfo", Arrays.asList(this.getTitle())));
         }
     }
+
+    /**
+     * Trades an access token for an {@link UserRecordIdentifier} (if valid).
+     *
+     * @apiNote The resulting {@link UserRecordIdentifier} may be used with
+     *          {@link edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean#lookupUser(UserRecordIdentifier)}
+     *          to look up an {@link edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser} from the database.
+     * @see edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism
+     *
+     * @param accessToken The token to use when requesting user information from the provider
+     * @return Returns an {@link UserRecordIdentifier} for a valid access token or an empty {@link Optional}.
+     * @throws IOException In case communication with the endpoint fails for an I/O reason
+     */
+    public Optional<UserRecordIdentifier> getUserIdentifier(BearerAccessToken accessToken) throws IOException {
+        OAuth2UserRecord userRecord;
+        try {
+            // Try to retrieve with given token (throws if invalid token)
+            Optional<UserInfo> userInfo = getUserInfo(accessToken);
+            
+            if (userInfo.isPresent()) {
+                // Take this detour to avoid code duplication and potentially hard to track conversion errors.
+                userRecord = getUserRecord(userInfo.get());
+            } else {
+                // This should not happen - an error at the provider side will lead to an exception.
+                logger.log(Level.WARNING,
+                    "User info retrieval from {0} returned empty optional but expected exception for token {1}.",
+                    List.of(getId(), accessToken).toArray()
+                );
+                return Optional.empty();
+            }
+        } catch (OAuth2Exception e) {
+            logger.log(Level.FINE,
+                "Could not retrieve user info with token {0} at provider {1}: {2}",
+                List.of(accessToken, getId(), e.getMessage()).toArray());
+            logger.log(Level.FINER, "Retrieval failed, details as follows: ", e);
+            return Optional.empty();
+        }
+        
+        return Optional.of(userRecord.getUserRecordIdentifier());
+    }
 }
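
For illustration only, a minimal self-contained sketch of the PKCE round trip the new OIDCAuthProvider code implements: a CodeVerifier is created per authorization request, cached under the state value in an evicting Caffeine cache, and retrieved again on the callback so it can be attached to the AuthorizationCodeGrant. The cache limits and placeholder values below are assumptions for the sketch, not the provider's actual configuration.

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.nimbusds.oauth2.sdk.AuthorizationCode;
import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant;
import com.nimbusds.oauth2.sdk.AuthorizationGrant;
import com.nimbusds.oauth2.sdk.pkce.CodeVerifier;

import java.net.URI;
import java.time.Duration;

public class PkceRoundTripSketch {
    // Evicting cache: unused verifiers expire instead of accumulating (illustrative limits).
    private static final Cache<String, CodeVerifier> VERIFIERS = Caffeine.newBuilder()
            .maximumSize(10_000)
            .expireAfterWrite(Duration.ofSeconds(300))
            .build();

    // Step 1: when building the authorization URL, create and cache a verifier keyed by state.
    static CodeVerifier startAuthorization(String state) {
        CodeVerifier verifier = new CodeVerifier();
        VERIFIERS.put(state, verifier);
        return verifier;
    }

    // Step 2: on the callback, look the verifier up again and attach it to the code grant.
    static AuthorizationGrant finishAuthorization(String state, String code, String redirectUrl) {
        CodeVerifier verifier = VERIFIERS.getIfPresent(state); // null if expired or PKCE disabled
        return new AuthorizationCodeGrant(new AuthorizationCode(code), URI.create(redirectUrl), verifier);
    }
}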
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java
index c6d1a28e19d..3f8c18d0567 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactory.java
@@ -5,6 +5,7 @@
 import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory;
 import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2AuthenticationProviderFactory;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 
 import java.util.Map;
 
@@ -37,11 +38,39 @@ public String getInfo() {
     public AuthenticationProvider buildProvider( AuthenticationProviderRow aRow ) throws AuthorizationSetupException {
         Map<String, String> factoryData = OAuth2AuthenticationProviderFactory.parseFactoryData(aRow.getFactoryData());
         
-        OIDCAuthProvider oidc = new OIDCAuthProvider(factoryData.get("clientId"), factoryData.get("clientSecret"), factoryData.get("issuer"));
+        OIDCAuthProvider oidc = new OIDCAuthProvider(
+            factoryData.get("clientId"),
+            factoryData.get("clientSecret"),
+            factoryData.get("issuer"),
+            Boolean.parseBoolean(factoryData.getOrDefault("pkceEnabled", "false")),
+            factoryData.getOrDefault("pkceMethod", "S256")
+        );
+        
         oidc.setId(aRow.getId());
         oidc.setTitle(aRow.getTitle());
         oidc.setSubTitle(aRow.getSubtitle());
         
         return oidc;
     }
+    
+    /**
+     * Build an OIDC provider from MicroProfile Config provisioned details
+     * @return The configured auth provider
+     * @throws AuthorizationSetupException When the metadata of the configured OIDC provider cannot be retrieved or is invalid
+     */
+    public static AuthenticationProvider buildFromSettings() throws AuthorizationSetupException {
+        OIDCAuthProvider oidc = new OIDCAuthProvider(
+            JvmSettings.OIDC_CLIENT_ID.lookup(),
+            JvmSettings.OIDC_CLIENT_SECRET.lookup(),
+            JvmSettings.OIDC_AUTH_SERVER_URL.lookup(),
+            JvmSettings.OIDC_PKCE_ENABLED.lookupOptional(Boolean.class).orElse(false),
+            JvmSettings.OIDC_PKCE_METHOD.lookupOptional().orElse("S256")
+        );
+        
+        oidc.setId("oidc-mpconfig");
+        oidc.setTitle(JvmSettings.OIDC_TITLE.lookupOptional().orElse("OpenID Connect"));
+        oidc.setSubTitle(JvmSettings.OIDC_SUBTITLE.lookupOptional().orElse("OpenID Connect"));
+        
+        return oidc;
+    }
 }
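
As a hedged illustration of what the extended constructor now consumes, the sketch below builds a factory-data map with the same keys that buildProvider() reads above and applies the same defaulting logic. The concrete values are placeholders, not a real configuration.

import java.util.Map;

public class OidcFactoryDataSketch {
    public static void main(String[] args) {
        // Keys mirror those parsed by buildProvider(); values are placeholders.
        Map<String, String> factoryData = Map.of(
                "clientId", "my-client-id",
                "clientSecret", "my-client-secret",
                "issuer", "https://idp.example.org/realms/demo",
                "pkceEnabled", "true",
                "pkceMethod", "S256"
        );

        // Same defaulting as in the factory: PKCE off unless explicitly enabled, S256 as default method.
        boolean pkceEnabled = Boolean.parseBoolean(factoryData.getOrDefault("pkceEnabled", "false"));
        String pkceMethod = factoryData.getOrDefault("pkceMethod", "S256");
        System.out.println("PKCE enabled: " + pkceEnabled + ", method: " + pkceMethod);
    }
}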
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
index 3e986a15689..0921b2c6683 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibServiceBean.java
@@ -4,17 +4,11 @@
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
 import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean;
-import edu.harvard.iq.dataverse.authorization.AuthenticationRequest;
-import edu.harvard.iq.dataverse.authorization.AuthenticationResponse;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
-import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.PasswordEncryption;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.io.InputStream;
@@ -25,11 +19,11 @@
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 @Named
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
index f8b30710656..4cf41903405 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtil.java
@@ -12,7 +12,7 @@
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 public class ShibUtil {
 
@@ -133,7 +133,24 @@ public static String findSingleValue(String mayHaveMultipleValues) {
         return singleValue;
     }
 
+    /**
+     * @deprecated because of a typo; use {@link #generateFriendlyLookingUserIdentifier(String, String)} instead
+     * @see #generateFriendlyLookingUserIdentifier(String, String)
+     * @param usernameAssertion
+     * @param email
+     * @return a friendly-looking user identifier based on the asserted username or email, or a UUID as fallback
+     */
+    @Deprecated
     public static String generateFriendlyLookingUserIdentifer(String usernameAssertion, String email) {
+        return generateFriendlyLookingUserIdentifier(usernameAssertion, email);
+    }
+
+    /**
+     * @param usernameAssertion
+     * @param email
+     * @return a friendly-looking user identifier based on the asserted username or email, or a UUID as fallback
+     */
+    public static String generateFriendlyLookingUserIdentifier(String usernameAssertion, String email) {
         if (usernameAssertion != null && !usernameAssertion.isEmpty()) {
             return usernameAssertion;
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java
index fc7ed8a9060..0de7d7754a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/ApiToken.java
@@ -2,18 +2,18 @@
 
 import java.io.Serializable;
 import java.sql.Timestamp;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.Table;
-import javax.validation.constraints.NotNull;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.Table;
+import jakarta.validation.constraints.NotNull;
 
 @Entity
 @NamedQueries({
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
index 9fdfce2f1a7..3cbfc3cdcac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUser.java
@@ -1,7 +1,9 @@
 package edu.harvard.iq.dataverse.authorization.users;
 
 import edu.harvard.iq.dataverse.Cart;
+import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.DatasetLock;
+import edu.harvard.iq.dataverse.FileAccessRequest;
 import edu.harvard.iq.dataverse.UserNotification.Type;
 import edu.harvard.iq.dataverse.UserNotification;
 import edu.harvard.iq.dataverse.validation.ValidateEmail;
@@ -17,29 +19,31 @@
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 import java.io.Serializable;
 import java.sql.Timestamp;
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.PostLoad;
-import javax.persistence.PrePersist;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotNull;
-import org.hibernate.validator.constraints.NotBlank;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.PostLoad;
+import jakarta.persistence.PrePersist;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.NotNull;
 
 /**
  * When adding an attribute to this class, be sure to update the following:
@@ -202,6 +206,29 @@ public void setDatasetLocks(List<DatasetLock> datasetLocks) {
     @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
     private List<OAuth2TokenData> oAuth2TokenDatas;
 
+    /*for many to many fileAccessRequests*/
+    @OneToMany(mappedBy = "user", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH}, fetch = FetchType.LAZY)
+    private List<FileAccessRequest> fileAccessRequests;
+
+    public List<FileAccessRequest> getFileAccessRequests() {
+        return fileAccessRequests;
+    }
+
+    public void setFileAccessRequests(List<FileAccessRequest> fARs) {
+        this.fileAccessRequests = fARs;
+    }
+
+    public List<DataFile> getRequestedDataFiles(){
+        List<DataFile> requestedDataFiles = new ArrayList<>();
+
+        for(FileAccessRequest far : getFileAccessRequests()){
+            if(far.isStateCreated()) {
+                requestedDataFiles.add(far.getDataFile());
+            }
+        }
+        return requestedDataFiles;
+    }
+    
     @Override
     public AuthenticatedUserDisplayInfo getDisplayInfo() {
         return new AuthenticatedUserDisplayInfo(firstName, lastName, email, affiliation, position);
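
Only as a side note on intent: the loop in getRequestedDataFiles() above is equivalent to the stream pipeline sketched below. This is a fragment, not a drop-in replacement, and it assumes the same FileAccessRequest#isStateCreated and #getDataFile accessors used in the added code.

import java.util.List;
import java.util.stream.Collectors;

// Fragment: stream form of getRequestedDataFiles(), keeping only requests still in the CREATED state.
public List<DataFile> getRequestedDataFiles() {
    return getFileAccessRequests().stream()
            .filter(FileAccessRequest::isStateCreated)
            .map(FileAccessRequest::getDataFile)
            .collect(Collectors.toList());
}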
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java b/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java
index f64b5c301e7..03f018221fd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/authorization/users/PrivateUrlUser.java
@@ -12,7 +12,7 @@
  */
 public class PrivateUrlUser implements User {
 
-    public static final String PREFIX = "#";
+    public static final String PREFIX = "!";
 
     /**
      * In the future, this could probably be dvObjectId rather than datasetId,
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java b/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java
index be2167fa4d5..debece131d3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/entities/JobExecutionEntity.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.batch.entities;
 
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.BatchStatus;
-import javax.batch.runtime.JobExecution;
-import javax.batch.runtime.StepExecution;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.BatchStatus;
+import jakarta.batch.runtime.JobExecution;
+import jakarta.batch.runtime.StepExecution;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.LinkedHashMap;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java b/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java
index 65ed3f32e1b..ba20386ed07 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/entities/StepExecutionEntity.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.batch.entities;
 
-import javax.batch.runtime.BatchStatus;
-import javax.batch.runtime.Metric;
-import javax.batch.runtime.StepExecution;
+import jakarta.batch.runtime.BatchStatus;
+import jakarta.batch.runtime.Metric;
+import jakarta.batch.runtime.StepExecution;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
index a5ba9a00bd2..a2f76150d7b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordJobListener.java
@@ -40,22 +40,22 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 
-import javax.batch.api.BatchProperty;
-import javax.batch.api.chunk.listener.ItemReadListener;
-import javax.batch.api.listener.JobListener;
-import javax.batch.api.listener.StepListener;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.BatchStatus;
-import javax.batch.runtime.JobExecution;
-import javax.batch.runtime.StepExecution;
-import javax.batch.runtime.context.JobContext;
-import javax.batch.runtime.context.StepContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.batch.api.BatchProperty;
+import jakarta.batch.api.chunk.listener.ItemReadListener;
+import jakarta.batch.api.listener.JobListener;
+import jakarta.batch.api.listener.StepListener;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.BatchStatus;
+import jakarta.batch.runtime.JobExecution;
+import jakarta.batch.runtime.StepExecution;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.batch.runtime.context.StepContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import org.apache.commons.io.IOUtils;
@@ -74,8 +74,8 @@
 import java.util.logging.Handler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.batch.operations.JobSecurityException;
-import javax.batch.operations.NoSuchJobExecutionException;
+import jakarta.batch.operations.JobSecurityException;
+import jakarta.batch.operations.NoSuchJobExecutionException;
 
 @Named
 @Dependent
@@ -450,7 +450,7 @@ private void loadChecksumManifest() {
         // We probably want package files to be able to use specific stores instead.
         // More importantly perhaps, the approach above does not take into account
         // if the dataset may have an AlternativePersistentIdentifier, that may be 
-        // designated isStorageLocationDesignator() - i.e., if a different identifer
+        // designated isStorageLocationDesignator() - i.e., if a different identifier
         // needs to be used to name the storage directory, instead of the main/current
         // persistent identifier above. 
         getJobLogger().log(Level.INFO, "Reading checksum manifest: " + manifestAbsolutePath);
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java
index af7caf32a7c..e5db80b9aa6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordProcessor.java
@@ -25,15 +25,15 @@
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DatasetVersion;
 
-import javax.annotation.PostConstruct;
-import javax.batch.api.chunk.ItemProcessor;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.context.JobContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.batch.api.chunk.ItemProcessor;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.io.File;
 import java.util.Properties;
 import java.util.logging.Level;
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java
index a4f8ffd2378..9ce30683a87 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordReader.java
@@ -28,17 +28,17 @@
 import org.apache.commons.io.filefilter.NotFileFilter;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
 
-import javax.annotation.PostConstruct;
-import javax.batch.api.BatchProperty;
-import javax.batch.api.chunk.AbstractItemReader;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.context.JobContext;
-import javax.batch.runtime.context.StepContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.batch.api.BatchProperty;
+import jakarta.batch.api.chunk.AbstractItemReader;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.batch.runtime.context.StepContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.io.File;
 import java.io.FileFilter;
 import java.io.Serializable;
@@ -109,7 +109,7 @@ public void open(Serializable checkpoint) throws Exception {
         // We probably want package files to be able to use specific stores instead.
         // More importantly perhaps, the approach above does not take into account
         // if the dataset may have an AlternativePersistentIdentifier, that may be 
-        // designated isStorageLocationDesignator() - i.e., if a different identifer
+        // designated isStorageLocationDesignator() - i.e., if a different identifier
         // needs to be used to name the storage directory, instead of the main/current
         // persistent identifier above. 
         getJobLogger().log(Level.INFO, "Reading dataset directory: " + directory.getAbsolutePath() 
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
index c82a5bb01eb..ba34a3d1ed1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/jobs/importer/filesystem/FileRecordWriter.java
@@ -36,17 +36,17 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
-import javax.annotation.PostConstruct;
-import javax.batch.api.BatchProperty;
-import javax.batch.api.chunk.AbstractItemWriter;
-import javax.batch.operations.JobOperator;
-import javax.batch.runtime.BatchRuntime;
-import javax.batch.runtime.context.JobContext;
-import javax.batch.runtime.context.StepContext;
-import javax.ejb.EJB;
-import javax.enterprise.context.Dependent;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.batch.api.BatchProperty;
+import jakarta.batch.api.chunk.AbstractItemWriter;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.batch.runtime.context.JobContext;
+import jakarta.batch.runtime.context.StepContext;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.Dependent;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import java.io.File;
 import java.io.Serializable;
 import java.sql.Timestamp;
@@ -57,7 +57,7 @@
 import java.util.Properties;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 
 @Named
@@ -358,12 +358,11 @@ private DataFile createPackageDataFile(List<File> files) {
         dataset.getLatestVersion().getFileMetadatas().add(fmd);
         fmd.setDatasetVersion(dataset.getLatestVersion());
         
-	String isFilePIDsEnabled = commandEngine.getContext().settings().getValueForKey(SettingsServiceBean.Key.FilePIDsEnabled, "true"); //default value for file PIDs is 'true'
-	if ("true".contentEquals( isFilePIDsEnabled )) {
-	
+    if (commandEngine.getContext().systemConfig().isFilePIDsEnabledForCollection(dataset.getOwner())) {
+
         GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(packageFile.getProtocol(), commandEngine.getContext());
         if (packageFile.getIdentifier() == null || packageFile.getIdentifier().isEmpty()) {
-            packageFile.setIdentifier(dataFileServiceBean.generateDataFileIdentifier(packageFile, idServiceBean));
+            packageFile.setIdentifier(idServiceBean.generateDataFileIdentifier(packageFile));
         }
         String nonNullDefaultIfKeyNotFound = "";
         String protocol = commandEngine.getContext().settings().getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
diff --git a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java
index a2f76ca953d..19d1112ba54 100644
--- a/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/batch/util/LoggingUtil.java
@@ -25,7 +25,7 @@
 import edu.harvard.iq.dataverse.engine.command.Command;
 import org.apache.commons.io.FileUtils;
 
-import javax.batch.runtime.JobExecution;
+import jakarta.batch.runtime.JobExecution;
 import java.io.File;
 import java.io.IOException;
 import java.text.SimpleDateFormat;
diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java
index 3cb071fe03f..c230229abf9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtil.java
@@ -6,7 +6,7 @@
 import java.util.Arrays;
 import java.util.logging.Logger;
 
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.InternetAddress;
 
 public class BrandingUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java
index 274970f8b8e..7729ab4763e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/branding/BrandingUtilHelper.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.branding;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java
index c05750c13e6..0ad9ab59f4b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailData.java
@@ -5,17 +5,17 @@
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java
index 07aea0d5011..b76e3db1379 100644
--- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailPage.java
@@ -7,10 +7,10 @@
 import edu.harvard.iq.dataverse.util.JsfHelper;
 import java.util.Arrays;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java
index e1053c3a93f..a54fd6bb0c1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailServiceBean.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse.confirmemail;
 
-import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
@@ -16,13 +15,13 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java
index 54e3114a0ae..6fc80312bf5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardDatamovePage.java
@@ -8,7 +8,6 @@
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.PermissionsWrapper;
 import edu.harvard.iq.dataverse.SettingsWrapper;
-import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
@@ -22,16 +21,16 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.servlet.http.HttpServletRequest;
 
 @ViewScoped
 @Named("DashboardDatamovePage")
diff --git a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java
index 5b5a21e21bf..477e4c0fdd6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dashboard/DashboardUsersPage.java
@@ -24,10 +24,10 @@
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 @ViewScoped
 @Named("DashboardUsersPage")
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java
new file mode 100644
index 00000000000..10ff68a56f3
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/AbstractRemoteOverlayAccessIO.java
@@ -0,0 +1,344 @@
+package edu.harvard.iq.dataverse.dataaccess;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.channels.Channel;
+import java.nio.file.Path;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.util.List;
+import java.util.function.Predicate;
+import java.util.logging.Logger;
+
+import javax.net.ssl.SSLContext;
+
+import org.apache.http.client.config.CookieSpecs;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.ssl.NoopHostnameVerifier;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.TrustAllStrategy;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.http.ssl.SSLContextBuilder;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DvObject;
+
+
+/**
+ * A base class for StorageIO implementations supporting remote access. At present, that includes the RemoteOverlayAccessIO store and the newer GlobusOverlayAccessIO store. It primarily includes
+ * common methods for handling auxiliary files in the configured base store.
+ * @param <T>
+ */
+public abstract class AbstractRemoteOverlayAccessIO<T extends DvObject> extends StorageIO<T> {
+
+    protected static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO");
+    public static final String REFERENCE_ENDPOINTS_WITH_BASEPATHS = "reference-endpoints-with-basepaths";
+    static final String BASE_STORE = "base-store";
+    protected static final String SECRET_KEY = "secret-key";
+    static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes";
+    protected static final String REMOTE_STORE_NAME = "remote-store-name";
+    protected static final String REMOTE_STORE_URL = "remote-store-url";
+    
+    // Whether Dataverse can access the file bytes
+    // Currently False only for the Globus store when using the S3Connector, and Remote Stores like simple web servers where the URLs resolve to the actual file bits
+    static final String FILES_NOT_ACCESSIBLE_BY_DATAVERSE = "files-not-accessible-by-dataverse";
+
+    protected StorageIO<DvObject> baseStore = null;
+    protected String path = null;
+    protected PoolingHttpClientConnectionManager cm = null;
+    CloseableHttpClient httpclient = null;
+    protected static HttpClientContext localContext = HttpClientContext.create();
+
+    protected int timeout = 1200;
+    protected RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000)
+                .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000)
+                .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build();
+    protected static boolean trustCerts = false;
+    protected int httpConcurrency = 4;
+
+    public static String getBaseStoreIdFor(String driverId) {
+        return getConfigParamForDriver(driverId, BASE_STORE);
+    }
+
+    public AbstractRemoteOverlayAccessIO() {
+        super();
+    }
+
+    public AbstractRemoteOverlayAccessIO(String storageLocation, String driverId) {
+        super(storageLocation, driverId);
+    }
+
+    public AbstractRemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) {
+        super(dvObject, req, driverId);
+    }
+
+    @Override
+    public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException {
+        return baseStore.openAuxChannel(auxItemTag, options);
+    }
+
+    @Override
+    public boolean isAuxObjectCached(String auxItemTag) throws IOException {
+        return baseStore.isAuxObjectCached(auxItemTag);
+    }
+
+    @Override
+    public long getAuxObjectSize(String auxItemTag) throws IOException {
+        return baseStore.getAuxObjectSize(auxItemTag);
+    }
+
+    @Override
+    public Path getAuxObjectAsPath(String auxItemTag) throws IOException {
+        return baseStore.getAuxObjectAsPath(auxItemTag);
+    }
+
+    @Override
+    public void backupAsAux(String auxItemTag) throws IOException {
+        baseStore.backupAsAux(auxItemTag);
+    }
+
+    @Override
+    public void revertBackupAsAux(String auxItemTag) throws IOException {
+        baseStore.revertBackupAsAux(auxItemTag);
+    }
+
+    @Override
+    public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException {
+        baseStore.savePathAsAux(fileSystemPath, auxItemTag);
+    }
+
+    @Override
+    public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException {
+        baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize);
+    }
+
+    /**
+     * @param inputStream InputStream we want to save
+     * @param auxItemTag  String representing this Auxiliary type ("extension")
+     * @throws IOException if anything goes wrong.
+     */
+    @Override
+    public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException {
+        baseStore.saveInputStreamAsAux(inputStream, auxItemTag);
+    }
+
+    @Override
+    public List<String> listAuxObjects() throws IOException {
+        return baseStore.listAuxObjects();
+    }
+
+    @Override
+    public void deleteAuxObject(String auxItemTag) throws IOException {
+        baseStore.deleteAuxObject(auxItemTag);
+    }
+
+    @Override
+    public void deleteAllAuxObjects() throws IOException {
+        baseStore.deleteAllAuxObjects();
+    }
+
+    @Override
+    public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException {
+        return baseStore.getAuxFileAsInputStream(auxItemTag);
+    }
+
+    protected int getUrlExpirationMinutes() {
+        String optionValue = getConfigParam(URL_EXPIRATION_MINUTES);
+        if (optionValue != null) {
+            Integer num;
+            try {
+                num = Integer.parseInt(optionValue);
+            } catch (NumberFormatException ex) {
+                num = null;
+            }
+            if (num != null) {
+                return num;
+            }
+        }
+        return 60;
+    }
+
+    public CloseableHttpClient getSharedHttpClient() {
+        if (httpclient == null) {
+            try {
+                initHttpPool();
+                httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build();
+    
+            } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) {
+                logger.warning(ex.getMessage());
+            }
+        }
+        return httpclient;
+    }
+
+    private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException {
+        if (trustCerts) {
+            // use the TrustSelfSignedStrategy to allow Self Signed Certificates
+            SSLContext sslContext;
+            SSLConnectionSocketFactory connectionFactory;
+    
+            sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build();
+            // create an SSL Socket Factory to use the SSLContext with the trust self signed
+            // certificate strategy
+            // and allow all hosts verifier.
+            connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE);
+    
+            Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
+                    .register("https", connectionFactory).build();
+            cm = new PoolingHttpClientConnectionManager(registry);
+        } else {
+            cm = new PoolingHttpClientConnectionManager();
+        }
+        cm.setDefaultMaxPerRoute(httpConcurrency);
+        cm.setMaxTotal(httpConcurrency > 20 ? httpConcurrency : 20);
+    }
+
+    @Override
+    abstract public long retrieveSizeFromMedia();
+    
+    @Override
+    public boolean exists() {
+        logger.fine("Exists called");
+        return (retrieveSizeFromMedia() != -1);
+    }
+
+    @Override
+    public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOException {
+        return baseStore.cleanUp(filter, dryRun);
+    }
+    
+    @Override
+    public String getStorageLocation() throws IOException {
+        String fullStorageLocation = dvObject.getStorageIdentifier();
+        logger.fine("storageidentifier: " + fullStorageLocation);
+        int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR);
+        if (driverIndex >= 0) {
+            fullStorageLocation = fullStorageLocation
+                    .substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length());
+        }
+        if (this.getDvObject() instanceof Dataset) {
+            throw new IOException("AbstractRemoteOverlayAccessIO: Datasets are not a supported dvObject");
+        } else if (this.getDvObject() instanceof DataFile) {
+            fullStorageLocation = StorageIO.getDriverPrefix(this.driverId) + fullStorageLocation;
+        } else if (dvObject instanceof Dataverse) {
+            throw new IOException("AbstractRemoteOverlayAccessIO: Dataverses are not a supported dvObject");
+        }
+        logger.fine("fullStorageLocation: " + fullStorageLocation);
+        return fullStorageLocation;
+    }
+    protected void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException {
+
+        if (baseStore == null) {
+            String baseDriverId = getBaseStoreIdFor(driverId);
+            String fullStorageLocation = null;
+            String baseDriverType = getConfigParamForDriver(baseDriverId, StorageIO.TYPE,
+                    DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
+
+            if (dvObject instanceof Dataset) {
+                baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId);
+            } else {
+                if (this.getDvObject() != null) {
+                    fullStorageLocation = getStoragePath();
+
+                    // S3 expects <id>://<bucketname>/<key>
+                    switch (baseDriverType) {
+                    case DataAccess.S3:
+                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
+                                + getConfigParamForDriver(baseDriverId, S3AccessIO.BUCKET_NAME) + "/"
+                                + fullStorageLocation;
+                        break;
+                    case DataAccess.FILE:
+                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
+                                + getConfigParamForDriver(baseDriverId, FileAccessIO.DIRECTORY, "/tmp/files")
+                                + "/" + fullStorageLocation;
+                        break;
+                    default:
+                        logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: "
+                                + getConfigParamForDriver(baseDriverId, StorageIO.TYPE));
+                        throw new IOException("Not supported");
+                    }
+
+                } else if (storageLocation != null) {
+                    // <remoteDriverId>://<baseStorageIdentifier>//<baseUrlPath>
+                    // remoteDriverId:// is removed if coming through directStorageIO
+                    int index = storageLocation.indexOf(DataAccess.SEPARATOR);
+                    if (index > 0) {
+                        storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length());
+                    }
+                    // The base store needs the baseStoreIdentifier and not the relative URL (if it exists)
+                    int endOfId = storageLocation.indexOf("//");
+                    fullStorageLocation = (endOfId>-1) ? storageLocation.substring(0, endOfId) : storageLocation;
+
+                    switch (baseDriverType) {
+                    case DataAccess.S3:
+                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
+                                + getConfigParamForDriver(baseDriverId, S3AccessIO.BUCKET_NAME) + "/"
+                                + fullStorageLocation;
+                        break;
+                    case DataAccess.FILE:
+                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
+                                + getConfigParamForDriver(baseDriverId, FileAccessIO.DIRECTORY, "/tmp/files")
+                                + "/" + fullStorageLocation;
+                        break;
+                    default:
+                        logger.warning("Not Supported: " + this.getClass().getName() + " store with base store type: "
+                                + getConfigParamForDriver(baseDriverId, StorageIO.TYPE));
+                        throw new IOException("Unsupported base store type: " + baseDriverType);
+                    }
+                }
+                baseStore = DataAccess.getDirectStorageIO(fullStorageLocation);
+            }
+            if (baseDriverType.contentEquals(DataAccess.S3)) {
+                ((S3AccessIO<?>) baseStore).setMainDriver(false);
+            }
+        }
+        remoteStoreName = getConfigParam(REMOTE_STORE_NAME);
+        try {
+            remoteStoreUrl = new URL(getConfigParam(REMOTE_STORE_URL));
+        } catch (MalformedURLException mfue) {
+            logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId);
+        }
+    }
+
+    protected String getStoragePath() throws IOException {
+        String fullStoragePath = dvObject.getStorageIdentifier();
+        logger.fine("storageidentifier: " + fullStoragePath);
+        int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR);
+        if (driverIndex >= 0) {
+            fullStoragePath = fullStoragePath
+                    .substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length());
+        }
+        int suffixIndex = fullStoragePath.indexOf("//");
+        if (suffixIndex >= 0) {
+            fullStoragePath = fullStoragePath.substring(0, suffixIndex);
+        }
+        if (getDvObject() instanceof Dataset) {
+            fullStoragePath = getDataset().getAuthorityForFileStorage() + "/"
+                    + getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath;
+        } else if (getDvObject() instanceof DataFile) {
+            fullStoragePath = getDataFile().getOwner().getAuthorityForFileStorage() + "/"
+                    + getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath;
+        } else if (dvObject instanceof Dataverse) {
+            throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject");
+        }
+        logger.fine("fullStoragePath: " + fullStoragePath);
+        return fullStoragePath;
+    }
+    
+    public static boolean isNotDataverseAccessible(String storeId) {
+        return Boolean.parseBoolean(StorageIO.getConfigParamForDriver(storeId, FILES_NOT_ACCESSIBLE_BY_DATAVERSE));
+    }
+
+
+
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java
index d046fa4661d..a1bcbe49327 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/DataAccess.java
@@ -48,6 +48,7 @@ public DataAccess() {
     public static final String S3 = "s3";
     static final String SWIFT = "swift";
     static final String REMOTE = "remote";
+    public static final String GLOBUS = "globus";
     static final String TMP = "tmp";
     public static final String SEPARATOR = "://";
     //Default to "file" is for tests only
@@ -98,6 +99,8 @@ protected static <T extends DvObject> StorageIO<T> getStorageIO(T dvObject, Data
 			return new SwiftAccessIO<>(dvObject, req, storageDriverId);
 		case REMOTE:
 			return new RemoteOverlayAccessIO<>(dvObject, req, storageDriverId);
+		case GLOBUS:
+			return new GlobusOverlayAccessIO<>(dvObject, req, storageDriverId);
 		case TMP:
 			throw new IOException(
 					"DataAccess IO attempted on a temporary file that hasn't been permanently saved yet.");
@@ -129,6 +132,8 @@ public static StorageIO<DvObject> getDirectStorageIO(String fullStorageLocation)
             return new SwiftAccessIO<>(storageLocation, storageDriverId);
         case REMOTE:
             return new RemoteOverlayAccessIO<>(storageLocation, storageDriverId);
+        case GLOBUS:
+            return new GlobusOverlayAccessIO<>(storageLocation, storageDriverId);
         default:
         	logger.warning("Could not find storage driver for: " + fullStorageLocation);
         	throw new IOException("getDirectStorageIO: Unsupported storage method.");
@@ -148,19 +153,41 @@ public static String[] getDriverIdAndStorageLocation(String storageLocation) {
     }
     
     public static String getStorageIdFromLocation(String location) {
-    	if(location.contains(SEPARATOR)) {
-    		//It's a full location with a driverId, so strip and reapply the driver id
-    		//NOte that this will strip the bucketname out (which s3 uses) but the S3IOStorage class knows to look at re-insert it
-    		return location.substring(0,location.indexOf(SEPARATOR) +3) + location.substring(location.lastIndexOf('/')+1); 
-    	}
-    	return location.substring(location.lastIndexOf('/')+1);
+        if (location.contains(SEPARATOR)) {
+            // It's a full location with a driverId, so strip and reapply the driver id.
+            // Note that this will strip the bucket name out (which S3 uses), but the
+            // S3AccessIO class knows to look for and re-insert it.
+            return location.substring(0, location.indexOf(SEPARATOR) + 3)
+                    + location.substring(location.lastIndexOf('/') + 1);
+        }
+        return location.substring(location.lastIndexOf('/') + 1);
+    }
+    
+    /** Changes storage identifiers of the form
+     * s3://bucketname/18b39722140-50eb7d3c5ece or file://18b39722140-50eb7d3c5ece to s3://10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece
+     * and
+     * 18b39722140-50eb7d3c5ece to 10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece
+     * @param id the storage identifier to transform
+     * @param dataset the dataset whose authority and identifier should be inserted into the path
+     * @return the storage location for the file within the dataset's directory
+     */
+    public static String getLocationFromStorageId(String id, Dataset dataset) {
+        String path= dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/";
+        if (id.contains(SEPARATOR)) {
+            // It's a full location with a driverId, so strip and reapply the driver id.
+            // Note that this will strip the bucket name out (which S3 uses), but the
+            // S3AccessIO class knows to look for and re-insert it.
+            return id.substring(0, id.indexOf(SEPARATOR) + 3) + path
+                    + id.substring(id.lastIndexOf('/') + 1);
+        }
+        return path + id.substring(id.lastIndexOf('/') + 1);
     }
     
     public static String getDriverType(String driverId) {
     	if(driverId.isEmpty() || driverId.equals("tmp")) {
     		return "tmp";
     	}
-    	return System.getProperty("dataverse.files." + driverId + ".type", "Undefined");
+    	return StorageIO.getConfigParamForDriver(driverId, StorageIO.TYPE, "Undefined");
     }
     
     //This 
@@ -168,7 +195,7 @@ public static String getDriverPrefix(String driverId) throws IOException {
         if(driverId.isEmpty() || driverId.equals("tmp")) {
             return "tmp" + SEPARATOR;
         }
-        String storageType = System.getProperty("dataverse.files." + driverId + ".type", "Undefined");
+        String storageType = StorageIO.getConfigParamForDriver(driverId, StorageIO.TYPE, "Undefined");
         switch(storageType) {
         case FILE:
             return FileAccessIO.getDriverPrefix(driverId);
@@ -236,7 +263,8 @@ public static <T extends DvObject> StorageIO<T> createNewStorageIO(T dvObject, S
         	storageIO = new S3AccessIO<>(dvObject, null, storageDriverId);
         	break;
         case REMOTE:
-            storageIO = createNewStorageIO(dvObject, storageTag, RemoteOverlayAccessIO.getBaseStoreIdFor(storageDriverId)) ;
+        case GLOBUS:
+            storageIO = createNewStorageIO(dvObject, storageTag, AbstractRemoteOverlayAccessIO.getBaseStoreIdFor(storageDriverId)) ;
             break;
         default:
         	logger.warning("Could not find storage driver for: " + storageTag);
@@ -369,9 +397,35 @@ public static boolean isValidDirectStorageIdentifier(String storageId) {
             return S3AccessIO.isValidIdentifier(driverId, storageId);
         case REMOTE:
             return RemoteOverlayAccessIO.isValidIdentifier(driverId, storageId);
+        case GLOBUS:
+            return GlobusOverlayAccessIO.isValidIdentifier(driverId, storageId);
         default:
             logger.warning("Request to validate for storage driver: " + driverId);
         }
         return false;
     }
+
+
+
+    public static String getNewStorageIdentifier(String driverId) {
+        String storageType = DataAccess.getDriverType(driverId);
+        if (storageType.equals("tmp") || storageType.equals("Undefined")) {
+            return null;
+        }
+        switch (storageType) {
+        case FILE:
+            return FileAccessIO.getNewIdentifier(driverId);
+        case SWIFT:
+            return SwiftAccessIO.getNewIdentifier(driverId);
+        case S3:
+            return S3AccessIO.getNewIdentifier(driverId);
+        case REMOTE:
+            return RemoteOverlayAccessIO.getNewIdentifier(driverId);
+        case GLOBUS:
+            return GlobusOverlayAccessIO.getNewIdentifier(driverId);
+        default:
+            logger.warning("Request for a new storage identifier for an unknown storage driver: " + driverId);
+        }
+        return null;
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java
index 8ee3f0cf53c..f2a1312a150 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIO.java
@@ -35,8 +35,6 @@
 import java.util.List;
 import java.util.function.Predicate;
 import java.util.logging.Logger;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
 // Dataverse imports:
@@ -55,6 +53,7 @@
 public class FileAccessIO<T extends DvObject> extends StorageIO<T> {
 
     private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.FileAccessIO");
+    public static final String DIRECTORY = "directory";
 
 
     public FileAccessIO() {
@@ -115,7 +114,7 @@ public void open (DataAccessOption... options) throws IOException {
 
                 this.setInputStream(fin);
                 setChannel(fin.getChannel());
-                this.setSize(getLocalFileSize());
+                this.setSize(retrieveSizeFromMedia());
 
                 if (dataFile.getContentType() != null
                         && dataFile.getContentType().equals("text/tab-separated-values")
@@ -506,21 +505,6 @@ public void delete() throws IOException {
     
     // Auxilary helper methods, filesystem access-specific:
     
-    private long getLocalFileSize () {
-        long fileSize = -1;
-
-        try {
-            File testFile = getFileSystemPath().toFile();
-            if (testFile != null) {
-                fileSize = testFile.length();
-            }
-            return fileSize;
-        } catch (IOException ex) {
-            return -1;
-        }
-
-    }
-
     public FileInputStream openLocalFileAsInputStream () {
         FileInputStream in;
 
@@ -565,21 +549,26 @@ private String getDatasetDirectory() throws IOException {
         if (isDirectAccess()) {
             throw new IOException("No DvObject defined in the Data Access Object");
         }
-
-        Path datasetDirectoryPath=null;
         
+        String authorityForFS = null;
+        String identifierForFS = null;
         if (dvObject instanceof Dataset) {
-            datasetDirectoryPath = Paths.get(this.getDataset().getAuthorityForFileStorage(), this.getDataset().getIdentifierForFileStorage());
+            authorityForFS = this.getDataset().getAuthorityForFileStorage();
+            identifierForFS = this.getDataset().getIdentifierForFileStorage();
         } else if (dvObject instanceof DataFile) {
-            datasetDirectoryPath = Paths.get(this.getDataFile().getOwner().getAuthorityForFileStorage(), this.getDataFile().getOwner().getIdentifierForFileStorage());
+            authorityForFS = this.getDataFile().getOwner().getAuthorityForFileStorage();
+            identifierForFS = this.getDataFile().getOwner().getIdentifierForFileStorage();
         } else if (dvObject instanceof Dataverse) {
             throw new IOException("FileAccessIO: Dataverses are not a supported dvObject");
         }
-            
-        if (datasetDirectoryPath == null) {
+        
+        if (authorityForFS == null || identifierForFS == null) {
             throw new IOException("Could not determine the filesystem directory of the parent dataset.");
         }
-        String datasetDirectory = Paths.get(getFilesRootDirectory(), datasetDirectoryPath.toString()).toString();
+        
+        // Determine the final directory tree. As of JDK 16, the first component of the path MUST be non-null
+        // (we check for that via the setting), and the other components make no sense if they are null either.
+        String datasetDirectory = Paths.get(getFilesRootDirectory(), authorityForFS, identifierForFS).toString();
 
         if (dvObject.getStorageIdentifier() == null || dvObject.getStorageIdentifier().isEmpty()) {
             throw new IOException("Data Access: No local storage identifier defined for this datafile.");
@@ -590,7 +579,7 @@ private String getDatasetDirectory() throws IOException {
     
     
     protected String getFilesRootDirectory() {
-        String filesRootDirectory = System.getProperty("dataverse.files." + this.driverId + ".directory", "/tmp/files");
+        String filesRootDirectory = getConfigParam(DIRECTORY, "/tmp/files");
         return filesRootDirectory;
     }
     
@@ -737,4 +726,18 @@ public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOE
         return toDelete;
     }
 
+    @Override
+    public long retrieveSizeFromMedia() {
+        long fileSize = -1;
+        try {
+            File testFile = getFileSystemPath().toFile();
+            if (testFile != null) {
+                fileSize = testFile.length();
+            }
+            return fileSize;
+        } catch (IOException ex) {
+            return -1;
+        }
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java
new file mode 100644
index 00000000000..8bed60d8302
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusAccessibleStore.java
@@ -0,0 +1,78 @@
+package edu.harvard.iq.dataverse.dataaccess;
+
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+
+public interface GlobusAccessibleStore {
+
+    //Whether Dataverse manages access controls for the Globus endpoint or not.
+    static final String MANAGED = "managed";
+    /*
+     * transfer and reference endpoint formats: <globusEndpointId/basePath>
+     * 
+     * REFERENCE_ENDPOINTS_WITH_BASEPATHS - a comma-separated list of reference endpoints
+     */
+    static final String TRANSFER_ENDPOINT_WITH_BASEPATH = "transfer-endpoint-with-basepath";
+    static final String GLOBUS_TOKEN = "globus-token";
+    
+    public static boolean isDataverseManaged(String driverId) {
+        return Boolean.parseBoolean(StorageIO.getConfigParamForDriver(driverId, MANAGED));
+    }
+    
+    public static String getTransferEnpointWithPath(String driverId) {
+        return StorageIO.getConfigParamForDriver(driverId, GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH);
+    }
+    
+    public static String getTransferEndpointId(String driverId) {
+        String endpointWithBasePath = StorageIO.getConfigParamForDriver(driverId, TRANSFER_ENDPOINT_WITH_BASEPATH);
+        int pathStart = endpointWithBasePath.indexOf("/");
+        return pathStart > 0 ? endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath;
+    }
+    
+    public static String getTransferPath(String driverId) {
+        String endpointWithBasePath = StorageIO.getConfigParamForDriver(driverId, TRANSFER_ENDPOINT_WITH_BASEPATH);
+        int pathStart = endpointWithBasePath.indexOf("/");
+        return pathStart > 0 ? endpointWithBasePath.substring(pathStart) : "";
+
+    }
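+
+    // Example (hypothetical values): for a transfer-endpoint-with-basepath of
+    // "d8c42580-6528-4605-aad8-80b56f458c28/datasets", getTransferEndpointId() returns
+    // "d8c42580-6528-4605-aad8-80b56f458c28" and getTransferPath() returns "/datasets".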
+
+    public static JsonArray getReferenceEndpointsWithPaths(String driverId) {
+        String[] endpoints = StorageIO.getConfigParamForDriver(driverId, AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS).split("\\s*,\\s*");
+        JsonArrayBuilder builder = Json.createArrayBuilder();
+        for(int i=0;i<endpoints.length;i++) {
+            builder.add(endpoints[i]);
+        }
+        return builder.build();
+    }
+    
+    public static boolean acceptsGlobusTransfers(String storeId) {
+        if(StorageIO.getConfigParamForDriver(storeId, TRANSFER_ENDPOINT_WITH_BASEPATH) != null) {
+            return true;
+        }
+        return false;
+    }
+
+    public static boolean allowsGlobusReferences(String storeId) {
+        if(StorageIO.getConfigParamForDriver(storeId, AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS) != null) {
+            return true;
+        }
+        return false;
+    }
+    
+    public static String getGlobusToken(String storeId) {
+        return StorageIO.getConfigParamForDriver(storeId, GLOBUS_TOKEN);
+    }
+    
+    public static boolean isGlobusAccessible(String storeId) {
+        String type = StorageIO.getConfigParamForDriver(storeId, StorageIO.TYPE);
+        if (type.equals(DataAccess.GLOBUS)) {
+            return true;
+        } else if (type.equals(DataAccess.S3)
+                && StorageIO.getConfigParamForDriver(storeId, TRANSFER_ENDPOINT_WITH_BASEPATH) != null) {
+            return true;
+        }
+        return false;
+    }
+    
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
new file mode 100644
index 00000000000..7a6809cb2ff
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIO.java
@@ -0,0 +1,503 @@
+package edu.harvard.iq.dataverse.dataaccess;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.datavariable.DataVariable;
+import edu.harvard.iq.dataverse.globus.AccessToken;
+import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
+import edu.harvard.iq.dataverse.util.UrlSignerUtil;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.InvalidPathException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.http.client.ClientProtocolException;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.util.EntityUtils;
+
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+
+/**
+ * @author qqmyers
+ * 
+ * This class implements three related use cases, all of which leverage the underlying idea of using a base store (as with the Https RemoteOverlay store):
+ *   Managed - where Dataverse has control of the specified Globus endpoint and can set/remove permissions as needed to allow file transfers in/out:
+ *      File/generic endpoint - assumes Dataverse does not have access to the datafile contents
+ *      S3-Connector endpoint - assumes the datafiles are accessible via Globus and via S3 such that Dataverse can access the datafile contents when needed.
+ *   Remote - where Dataverse references files that remain at remote Globus endpoints (as with the Https RemoteOverlay store) and cannot access the datafile contents.
+ *   
+ *   Note that Globus endpoints can provide Http URLs to get file contents, so a future enhancement could potentially support datafile contents access in the Managed/File and Remote cases. 
+ *   
+ *    */
+/*
+ * Globus Overlay Driver storageIdentifer format:
+ * 
+ * Remote: StorageIdentifier format:
+ * <globusDriverId>://<baseStorageIdentifier>//<relativePath> 
+ * 
+ * Storage location:
+ * <globusendpointId/basepath>/<relPath> 
+ * 
+ * Internal StorageIdentifier format:
+ * <globusDriverId>://<baseStorageIdentifier> 
+ * 
+ * Storage location:
+ * <globusEndpointId/basepath>/<dataset authority>/<dataset identifier>/<baseStorageIdentifier>
+ * 
+ */
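+/*
+ * Example identifiers (hypothetical values for illustration only):
+ * 
+ * Remote:  globusr://18b39722140-50eb7d3c5ece//d8c42580-6528-4605-aad8-80b56f458c28/data/file1.txt
+ * Managed: globusm://18b39722140-50eb7d3c5ece
+ */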
+public class GlobusOverlayAccessIO<T extends DvObject> extends AbstractRemoteOverlayAccessIO<T> implements GlobusAccessibleStore {
+    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIO");
+
+    /*
+     * If this is set to true, the store supports Globus transfer in and
+     * Dataverse/the globus app manage file locations, access controls, deletion,
+     * etc.
+     */
+    private Boolean dataverseManaged = null;
+
+    private String relativeDirectoryPath;
+    
+    private String endpointPath;
+    
+    private String filename;
+
+    private String[] allowedEndpoints;
+    private String endpoint;
+
+    public GlobusOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException {
+        super(dvObject, req, driverId);
+        configureGlobusEndpoints();
+        configureStores(req, driverId, null);
+        logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier());
+        path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2);
+        validatePath(path);
+
+        logger.fine("Relative path: " + path);
+    }
+
+
+    public GlobusOverlayAccessIO(String storageLocation, String driverId) throws IOException {
+        this.driverId = driverId;
+        configureGlobusEndpoints();
+        configureStores(null, driverId, storageLocation);
+        if (isManaged()) {
+            String[] parts = DataAccess.getDriverIdAndStorageLocation(storageLocation);
+            path = parts[1];
+        } else {
+            this.setIsLocalFile(false);
+            path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2);
+            validatePath(path);
+            logger.fine("Referenced path: " + path);
+        }
+    }
+    
+    private boolean isManaged() {
+        if(dataverseManaged==null) {
+            dataverseManaged = GlobusAccessibleStore.isDataverseManaged(this.driverId);
+        }
+        return dataverseManaged;
+    }
+    
+    private String retrieveGlobusAccessToken() {
+        String globusToken = getConfigParam(GlobusAccessibleStore.GLOBUS_TOKEN);
+        
+
+        AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken);
+        return accessToken.getOtherTokens().get(0).getAccessToken();
+    }
+
+    private void parsePath() {
+        int filenameStart = path.lastIndexOf("/") + 1;
+        String endpointWithBasePath = null;
+        if (!isManaged()) {
+            endpointWithBasePath = findMatchingEndpoint(path, allowedEndpoints);
+        } else {
+            endpointWithBasePath = allowedEndpoints[0];
+        }
+        //String endpointWithBasePath = baseEndpointPath.substring(baseEndpointPath.lastIndexOf(DataAccess.SEPARATOR) + 3);
+        int pathStart = endpointWithBasePath.indexOf("/");
+        logger.fine("endpointWithBasePath: " + endpointWithBasePath);
+        endpointPath = "/" + (pathStart > 0 ? endpointWithBasePath.substring(pathStart + 1) : "");
+        logger.fine("endpointPath: " + endpointPath);
+        
+
+        if (isManaged() && (dvObject!=null)) {
+            
+            Dataset ds = null;
+            if (dvObject instanceof Dataset) {
+                ds = (Dataset) dvObject;
+            } else if (dvObject instanceof DataFile) {
+                ds = ((DataFile) dvObject).getOwner();
+            }
+            relativeDirectoryPath = "/" + ds.getAuthority() + "/" + ds.getIdentifier();
+        } else {
+            relativeDirectoryPath = "";
+        }
+        if (filenameStart > 0) {
+            relativeDirectoryPath = relativeDirectoryPath + path.substring(0, filenameStart);
+        }
+        logger.fine("relativeDirectoryPath finally: " + relativeDirectoryPath);
+        filename = path.substring(filenameStart);
+        endpoint = pathStart > 0 ? endpointWithBasePath.substring(0, pathStart) : endpointWithBasePath;
+
+        
+    }
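+
+    // Example (hypothetical, managed case): with a single allowed endpoint of
+    // "d8c42580-6528-4605-aad8-80b56f458c28/basepath" and a path of "18b39722140-50eb7d3c5ece"
+    // for a dataset with authority "10.5072" and identifier "FK2/ABCDEF", parsePath() sets
+    // endpoint="d8c42580-6528-4605-aad8-80b56f458c28", endpointPath="/basepath",
+    // relativeDirectoryPath="/10.5072/FK2/ABCDEF", and filename="18b39722140-50eb7d3c5ece".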
+
+    private static String findMatchingEndpoint(String path, String[] allowedEndpoints) {
+        for(int i=0;i<allowedEndpoints.length;i++) {
+            if (path.startsWith(allowedEndpoints[i])) {
+                return allowedEndpoints[i];
+            }
+        }
+        logger.warning("Could not find matching endpoint for path: " + path);
+        return null;
+    }
+
+    protected void validatePath(String relPath) throws IOException {
+        if (isManaged()) {
+            if (!usesStandardNamePattern(relPath)) {
+                throw new IOException("Unacceptable identifier pattern in submitted identifier: " + relPath);
+            }
+        } else {
+            try {
+                String endpoint = findMatchingEndpoint(relPath, allowedEndpoints);
+                logger.fine(endpoint + "  " + relPath);
+
+                if (endpoint == null || !Paths.get(endpoint, relPath).normalize().startsWith(endpoint)) {
+                    throw new IOException(
+                            "storageidentifier doesn't start with one of " + this.driverId + "'s allowed endpoints");
+                }
+            } catch (InvalidPathException e) {
+                throw new IOException("Could not interpret storageidentifier in globus store " + this.driverId);
+            }
+        }
+
+    }
+
+    // Call the Globus API to get the file size
+    @Override
+    public long retrieveSizeFromMedia() {
+        parsePath();
+        String globusAccessToken = retrieveGlobusAccessToken();
+        // Construct Globus URL
+        URI absoluteURI = null;
+        try {
+
+            absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/operation/endpoint/" + endpoint
+                    + "/ls?path=" + endpointPath + relativeDirectoryPath + "&filter=name:" + filename);
+            HttpGet get = new HttpGet(absoluteURI);
+
+            get.addHeader("Authorization", "Bearer " + globusAccessToken);
+            CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext);
+            if (response.getStatusLine().getStatusCode() == 200) {
+                // Get response as a string
+                String responseString = EntityUtils.toString(response.getEntity());
+                logger.fine("Response from " + get.getURI().toString() + " is: " + responseString);
+                JsonObject responseJson = JsonUtil.getJsonObject(responseString);
+                JsonArray dataArray = responseJson.getJsonArray("DATA");
+                if (dataArray != null && dataArray.size() != 0) {
+                    //File found
+                    return (long) responseJson.getJsonArray("DATA").getJsonObject(0).getInt("size");
+                }
+            } else {
+                logger.warning("Response from " + get.getURI().toString() + " was "
+                        + response.getStatusLine().getStatusCode());
+                logger.fine(EntityUtils.toString(response.getEntity()));
+            }
+        } catch (URISyntaxException e) {
+            // Should have been caught in validatePath
+            e.printStackTrace();
+        } catch (ClientProtocolException e) {
+            logger.warning("Error contacting the Globus transfer API: " + e.getMessage());
+            e.printStackTrace();
+        } catch (IOException e) {
+            logger.warning("Error reading the file size from the Globus endpoint: " + e.getMessage());
+            e.printStackTrace();
+        }
+        return -1;
+    }
+
+    
+    @Override
+    public InputStream getInputStream() throws IOException {
+        //Currently only supported when using an S3 store with the Globus S3Connector.
+        //ToDo: Support when using a managed Globus endpoint that supports http access
+        if(!AbstractRemoteOverlayAccessIO.isNotDataverseAccessible(endpoint)) {
+            return baseStore.getInputStream();
+        } else {
+            throw new IOException("Not implemented");
+        }
+    }
+    
+    @Override
+    public void delete() throws IOException {
+
+        parsePath();
+        // Delete is best-effort - we tell the endpoint to delete but don't monitor
+        // whether it succeeds
+        if (!isDirectAccess()) {
+            throw new IOException("Direct Access IO must be used to permanently delete stored file objects");
+        }
+        String globusAccessToken = retrieveGlobusAccessToken();
+        // Construct Globus URL
+        URI absoluteURI = null;
+        try {
+
+            absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/submission_id");
+            HttpGet get = new HttpGet(absoluteURI);
+
+            get.addHeader("Authorization", "Bearer " + globusAccessToken);
+            CloseableHttpResponse response = getSharedHttpClient().execute(get, localContext);
+            if (response.getStatusLine().getStatusCode() == 200) {
+                // Get response as a string
+                String responseString = EntityUtils.toString(response.getEntity());
+                logger.fine("Response from " + get.getURI().toString() + " is: " + responseString);
+                JsonObject responseJson = JsonUtil.getJsonObject(responseString);
+                String submissionId = responseJson.getString("value");
+                logger.fine("submission_id for delete is: " + submissionId);
+                absoluteURI = new URI("https://transfer.api.globusonline.org/v0.10/delete");
+                HttpPost post = new HttpPost(absoluteURI);
+                JsonObjectBuilder taskJsonBuilder = Json.createObjectBuilder();
+                taskJsonBuilder.add("submission_id", submissionId).add("DATA_TYPE", "delete").add("endpoint", endpoint)
+                        .add("DATA", Json.createArrayBuilder().add(Json.createObjectBuilder().add("DATA_TYPE", "delete_item").add("path",
+                                endpointPath + relativeDirectoryPath + "/" + filename)));
+                post.setHeader("Content-Type", "application/json");
+                post.addHeader("Authorization", "Bearer " + globusAccessToken);
+                String taskJson= JsonUtil.prettyPrint(taskJsonBuilder.build());
+                logger.fine("Sending: " + taskJson);
+                post.setEntity(new StringEntity(taskJson, "utf-8"));
+                CloseableHttpResponse postResponse = getSharedHttpClient().execute(post, localContext);
+                int statusCode=postResponse.getStatusLine().getStatusCode();
+                logger.fine("Response :" + statusCode + ": " +postResponse.getStatusLine().getReasonPhrase());
+                switch (statusCode) {
+                case 202:
+                    // ~Success - delete task was accepted
+                    logger.fine("Globus delete initiated: " + EntityUtils.toString(postResponse.getEntity()));
+                    break;
+                case 200:
+                    // Duplicate - delete task was already accepted
+                    logger.warning("Duplicate Globus delete: " + EntityUtils.toString(postResponse.getEntity()));
+                    break;
+                default:
+                    logger.warning("Response from " + post.getURI().toString() + " was "
+                            + postResponse.getStatusLine().getStatusCode());
+                    logger.fine(EntityUtils.toString(postResponse.getEntity()));
+                }
+
+            } else {
+                logger.warning("Response from " + get.getURI().toString() + " was "
+                        + response.getStatusLine().getStatusCode());
+                logger.fine(EntityUtils.toString(response.getEntity()));
+            }
+        } catch (Exception e) {
+            logger.warning(e.getMessage());
+            throw new IOException("Error deleting: " + endpoint + "/" + path);
+
+        }
+
+        // Delete all the cached aux files as well:
+        deleteAllAuxObjects();
+
+    }
+
+    @Override
+    public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName)
+            throws IOException {
+//Fix
+        parsePath();
+        // ToDo - support remote auxiliary Files
+        if (auxiliaryTag == null) {
+            String secretKey = getConfigParam(SECRET_KEY);
+            if (secretKey == null) {
+                return endpoint + "/" + path;
+            } else {
+                return UrlSignerUtil.signUrl(endpoint + "/" + path, getUrlExpirationMinutes(), null, "GET", secretKey);
+            }
+        } else {
+            return baseStore.generateTemporaryDownloadUrl(auxiliaryTag, auxiliaryType, auxiliaryFileName);
+        }
+    }
+
+    static boolean isValidIdentifier(String driverId, String storageId) {
+        String baseIdentifier = storageId.substring(storageId.lastIndexOf("//") + 2);
+        try {
+            String[] allowedEndpoints = getAllowedEndpoints(driverId);
+
+            // Internally managed endpoints require standard name pattern (submitted via
+            // /addFile(s) api)
+            if (GlobusAccessibleStore.isDataverseManaged(driverId)) {
+                boolean hasStandardName = usesStandardNamePattern(baseIdentifier);
+                if (hasStandardName) {
+                    return true;
+                } else {
+                    logger.warning("Unacceptable identifier pattern in submitted identifier: " + baseIdentifier);
+                    return false;
+                }
+            }
+            // Remote endpoints require a valid URI within the baseUrl
+            try {
+                String endpoint = findMatchingEndpoint(baseIdentifier, allowedEndpoints);
+
+                if (endpoint == null || !Paths.get(endpoint, baseIdentifier).normalize().startsWith(endpoint)) {
+                    logger.warning("storageidentifier doesn't start with one of " + driverId + "'s allowed endpoints");
+                    return false;
+                }
+            } catch (InvalidPathException e) {
+                logger.warning("Could not interpret storageidentifier in globus store " + driverId);
+                return false;
+            }
+            return true;
+        } catch (IOException e) {
+            return false;
+        }
+    }
+
+    @Override
+    public String getStorageLocation() throws IOException {
+        parsePath();
+        if (isManaged()) {
+            return this.driverId + DataAccess.SEPARATOR + relativeDirectoryPath + "/" + filename;
+        } else {
+            return super.getStorageLocation();
+        }
+    }
+    
+    /** This method configures all the endpoints the store is allowed to reference data from. At present, the RemoteOverlayAccessIO only supports a single endpoint, but
+     * the derived GlobusOverlayAccessIO can support multiple endpoints.
+     * @throws IOException
+     */
+    protected void configureGlobusEndpoints() throws IOException {
+        allowedEndpoints = getAllowedEndpoints(this.driverId);
+        logger.fine("Set allowed endpoints: " + Arrays.toString(allowedEndpoints));
+    }
+    
+    private static String[] getAllowedEndpoints(String driverId) throws IOException {
+        String[] allowedEndpoints = null;
+        if (GlobusAccessibleStore.isDataverseManaged(driverId)) {
+            allowedEndpoints = new String[1];
+            allowedEndpoints[0] = getConfigParamForDriver(driverId, TRANSFER_ENDPOINT_WITH_BASEPATH);
+            if (allowedEndpoints[0] == null) {
+                throw new IOException(
+                        "dataverse.files." + driverId + "." + TRANSFER_ENDPOINT_WITH_BASEPATH + " is required");
+            }
+        } else {
+            String rawEndpoints = getConfigParamForDriver(driverId, REFERENCE_ENDPOINTS_WITH_BASEPATHS);
+            if (rawEndpoints != null) {
+                allowedEndpoints = rawEndpoints.split("\\s*,\\s*");
+            }
+            if (rawEndpoints == null || allowedEndpoints.length == 0) {
+                throw new IOException(
+                        "dataverse.files." + driverId + "." + REFERENCE_ENDPOINTS_WITH_BASEPATHS + " is required");
+            }
+        }
+        return allowedEndpoints;
+    }
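+
+    // Configuration sketch (values are hypothetical; property names follow the
+    // dataverse.files.<driverId>.<param> convention used by getConfigParamForDriver):
+    //   dataverse.files.globusm.managed=true
+    //   dataverse.files.globusm.transfer-endpoint-with-basepath=<endpointId>/<basePath>
+    //   dataverse.files.globusr.<REFERENCE_ENDPOINTS_WITH_BASEPATHS>=<endpointId1>/<path1>,<endpointId2>/<path2>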
+
+
+    @Override
+    public void open(DataAccessOption... options) throws IOException {
+
+        baseStore.open(options);
+
+        DataAccessRequest req = this.getRequest();
+
+        if (isWriteAccessRequested(options)) {
+            isWriteAccess = true;
+            isReadAccess = false;
+        } else {
+            isWriteAccess = false;
+            isReadAccess = true;
+        }
+
+        if (dvObject instanceof DataFile) {
+            String storageIdentifier = dvObject.getStorageIdentifier();
+
+            DataFile dataFile = this.getDataFile();
+
+            if (req != null && req.getParameter("noVarHeader") != null) {
+                baseStore.setNoVarHeader(true);
+            }
+
+            if (storageIdentifier == null || "".equals(storageIdentifier)) {
+                throw new FileNotFoundException("Data Access: No local storage identifier defined for this datafile.");
+            }
+
+            logger.fine("StorageIdentifier is: " + storageIdentifier);
+
+            if (isReadAccess) {
+                if (dataFile.getFilesize() >= 0) {
+                    this.setSize(dataFile.getFilesize());
+                } else {
+                    logger.fine("Setting size");
+                    this.setSize(retrieveSizeFromMedia());
+                }
+                // Only applies for the S3 Connector case (where we could have run an ingest)
+                if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values")
+                        && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) {
+
+                    List<DataVariable> datavariables = dataFile.getDataTable().getDataVariables();
+                    String varHeaderLine = generateVariableHeader(datavariables);
+                    this.setVarHeader(varHeaderLine);
+                }
+
+            }
+
+            this.setMimeType(dataFile.getContentType());
+
+            try {
+                this.setFileName(dataFile.getFileMetadata().getLabel());
+            } catch (Exception ex) {
+                this.setFileName("unknown");
+            }
+        } else if (dvObject instanceof Dataset) {
+            throw new IOException(
+                    "Data Access: " + this.getClass().getName() + " does not support dvObject type Dataset yet");
+        } else if (dvObject instanceof Dataverse) {
+            throw new IOException(
+                    "Data Access: " + this.getClass().getName() + " does not support dvObject type Dataverse yet");
+        }
+    }
+
+    @Override
+    public Path getFileSystemPath() throws IOException {
+        throw new UnsupportedDataAccessOperationException(
+                this.getClass().getName() + ": getFileSystemPath() not implemented in this storage driver.");
+    }
+
+    @Override
+    public void savePath(Path fileSystemPath) throws IOException {
+        throw new UnsupportedDataAccessOperationException(
+                this.getClass().getName() + ": savePath() not implemented in this storage driver.");
+    }
+
+    @Override
+    public void saveInputStream(InputStream inputStream) throws IOException {
+        throw new UnsupportedDataAccessOperationException(
+                this.getClass().getName() + ": saveInputStream() not implemented in this storage driver.");
+    }
+
+    @Override
+    public void saveInputStream(InputStream inputStream, Long filesize) throws IOException {
+        throw new UnsupportedDataAccessOperationException(
+                this.getClass().getName() + ": saveInputStream() not implemented in this storage driver.");
+    }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java
index 2b4aed3a9a5..2de37174a3b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/ImageThumbConverter.java
@@ -48,6 +48,7 @@
 import java.nio.channels.WritableByteChannel;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.io.IOUtils;
 //import org.primefaces.util.Base64;
@@ -110,19 +111,30 @@ private static boolean isThumbnailAvailable(StorageIO<DataFile> storageIO, int s
         }
 
         if (isThumbnailCached(storageIO, size)) {
+            logger.fine("Found cached thumbnail for " + file.getId());
             return true;
         }
+        return generateThumbnail(file, storageIO, size);
 
-        logger.fine("Checking for thumbnail, file type: " + file.getContentType());
+    }
 
-        if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) {
-            return generateImageThumbnail(storageIO, size);
-        } else if (file.getContentType().equalsIgnoreCase("application/pdf")) {
-            return generatePDFThumbnail(storageIO, size);
+    private static boolean generateThumbnail(DataFile file, StorageIO<DataFile> storageIO, int size) {
+        logger.log(Level.FINE, (file.isPreviewImageFail() ? "Not trying" : "Trying") + " to generate thumbnail, file id: " + file.getId());
+        // Don't try to generate if there have been failures:
+        if (!file.isPreviewImageFail()) {
+            boolean thumbnailGenerated = false;
+            if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) {
+                thumbnailGenerated = generateImageThumbnail(storageIO, size);
+            } else if (file.getContentType().equalsIgnoreCase("application/pdf")) {
+                thumbnailGenerated = generatePDFThumbnail(storageIO, size);
+            }
+            if (!thumbnailGenerated) {
+                logger.fine("No thumbnail generated for " + file.getId());
+            }
+            return thumbnailGenerated;
         }
 
         return false;
-
     }
 
     // Note that this method works on ALL file types for which thumbnail 
@@ -184,6 +196,7 @@ private static boolean generatePDFThumbnail(StorageIO<DataFile> storageIO, int s
         // We rely on ImageMagick to convert PDFs; so if it's not installed, 
         // better give up right away: 
         if (!isImageMagickInstalled()) {
+            logger.fine("Couldn't find ImageMagick");
             return false;
         }
 
@@ -206,35 +219,34 @@ private static boolean generatePDFThumbnail(StorageIO<DataFile> storageIO, int s
             tempFilesRequired = true;
 
         } catch (IOException ioex) {
+            logger.warning(ioex.getMessage());
             // this on the other hand is likely a fatal condition :(
             return false;
         }
 
         if (tempFilesRequired) {
-            ReadableByteChannel pdfFileChannel;
-
+            InputStream inputStream = null; 
             try {
                 storageIO.open();
-                //inputStream = storageIO.getInputStream();
-                pdfFileChannel = storageIO.getReadChannel();
+                inputStream = storageIO.getInputStream();
             } catch (Exception ioex) {
                 logger.warning("caught Exception trying to open an input stream for " + storageIO.getDataFile().getStorageIdentifier());
                 return false;
             }
 
             File tempFile;
-            FileChannel tempFileChannel = null;
+            OutputStream outputStream = null;
             try {
                 tempFile = File.createTempFile("tempFileToRescale", ".tmp");
-                tempFileChannel = new FileOutputStream(tempFile).getChannel();
-
-                tempFileChannel.transferFrom(pdfFileChannel, 0, storageIO.getSize());
+                outputStream = new FileOutputStream(tempFile);
+                //Reads/transfers all bytes from the input stream to the output stream. 
+                inputStream.transferTo(outputStream);
             } catch (IOException ioex) {
                 logger.warning("GenerateImageThumb: failed to save pdf bytes in a temporary file.");
                 return false;
             } finally {
-                IOUtils.closeQuietly(tempFileChannel);
-                IOUtils.closeQuietly(pdfFileChannel);
+                IOUtils.closeQuietly(inputStream);
+                IOUtils.closeQuietly(outputStream);
             }
             sourcePdfFile = tempFile;
         }
@@ -436,16 +448,8 @@ public static String getImageThumbnailAsBase64(DataFile file, int size) {
         if (cachedThumbnailChannel == null) {
             logger.fine("Null channel for aux object " + THUMBNAIL_SUFFIX + size);
 
-            // try to generate, if not available: 
-            boolean generated = false;
-            if (file.getContentType().substring(0, 6).equalsIgnoreCase("image/")) {
-                generated = generateImageThumbnail(storageIO, size);
-            } else if (file.getContentType().equalsIgnoreCase("application/pdf")) {
-                generated = generatePDFThumbnail(storageIO, size);
-            }
-
-            if (generated) {
-                // try to open again: 
+            // try to generate, if not available and hasn't failed before
+            if(generateThumbnail(file, storageIO, size)) {
                 try {
                     cachedThumbnailChannel = storageIO.openAuxChannel(THUMBNAIL_SUFFIX + size);
                 } catch (Exception ioEx) {
@@ -757,7 +761,7 @@ public static String generatePDFThumbnailFromFile(String fileLocation, int size)
             try {
                 fileSize = new File(fileLocation).length();
             } catch (Exception ex) {
-                // 
+               logger.warning("Can't open file: " + fileLocation);
             }
 
             if (fileSize == 0 || fileSize > sizeLimit) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java
index be6f9df0254..de392b74cca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/InputStreamIO.java
@@ -165,4 +165,9 @@ public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOE
         throw new UnsupportedDataAccessOperationException("InputStreamIO: tthis method is not supported in this DataAccess driver.");
     }
 
+    @Override
+    public long retrieveSizeFromMedia() throws UnsupportedDataAccessOperationException {
+        throw new UnsupportedDataAccessOperationException("InputStreamIO: this method is not supported in this DataAccess driver.");
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java
index 66c6a4cc2ee..1616bfabf96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIO.java
@@ -11,105 +11,77 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URL;
 import java.nio.channels.Channel;
 import java.nio.channels.Channels;
 import java.nio.channels.ReadableByteChannel;
 import java.nio.channels.WritableByteChannel;
 import java.nio.file.Path;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
 import java.util.List;
-import java.util.function.Predicate;
-import java.util.logging.Logger;
 
 import org.apache.http.Header;
-import org.apache.http.client.config.CookieSpecs;
-import org.apache.http.client.config.RequestConfig;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.config.Registry;
-import org.apache.http.config.RegistryBuilder;
-import org.apache.http.conn.socket.ConnectionSocketFactory;
-import org.apache.http.conn.ssl.NoopHostnameVerifier;
-import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
-import org.apache.http.conn.ssl.TrustAllStrategy;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.apache.http.protocol.HTTP;
-import org.apache.http.ssl.SSLContextBuilder;
 import org.apache.http.util.EntityUtils;
 
-import javax.net.ssl.SSLContext;
-
 /**
  * @author qqmyers
- * @param <T> what it stores
  */
 /*
  * Remote Overlay Driver
  * 
  * StorageIdentifier format:
- * <httpDriverId>://<baseStorageIdentifier>//<baseUrlPath>
+ * <remoteDriverId>://<baseStorageIdentifier>//<relativePath>
+ * 
+ * baseUrl: http(s)://<host(:port)/basePath>
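+ * 
+ * e.g. (hypothetical): with base-url "https://example.org/data", the identifier
+ * "remote://18b39722140-50eb7d3c5ece//images/file1.txt" resolves to
+ * "https://example.org/data/images/file1.txt"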
  */
-public class RemoteOverlayAccessIO<T extends DvObject> extends StorageIO<T> {
-
-    private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.RemoteOverlayAccessIO");
-
-    private StorageIO<DvObject> baseStore = null;
-    private String urlPath = null;
-    private String baseUrl = null;
+public class RemoteOverlayAccessIO<T extends DvObject> extends AbstractRemoteOverlayAccessIO<T> {
 
-    private static HttpClientContext localContext = HttpClientContext.create();
-    private PoolingHttpClientConnectionManager cm = null;
-    CloseableHttpClient httpclient = null;
-    private int timeout = 1200;
-    private RequestConfig config = RequestConfig.custom().setConnectTimeout(timeout * 1000)
-            .setConnectionRequestTimeout(timeout * 1000).setSocketTimeout(timeout * 1000)
-            .setCookieSpec(CookieSpecs.STANDARD).setExpectContinueEnabled(true).build();
-    private static boolean trustCerts = false;
-    private int httpConcurrency = 4;
+    // A single baseUrl of the form http(s)://<host(:port)/basePath> where this store can reference data
+    static final String BASE_URL = "base-url";
+    String baseUrl = null;
 
+    public RemoteOverlayAccessIO() {
+        super();
+    }
+    
     public RemoteOverlayAccessIO(T dvObject, DataAccessRequest req, String driverId) throws IOException {
         super(dvObject, req, driverId);
         this.setIsLocalFile(false);
+        configureRemoteEndpoints();
         configureStores(req, driverId, null);
         logger.fine("Parsing storageidentifier: " + dvObject.getStorageIdentifier());
-        urlPath = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2);
-        validatePath(urlPath);
-        
-        logger.fine("Base URL: " + urlPath);
+        path = dvObject.getStorageIdentifier().substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2);
+        validatePath(path);
+
+        logger.fine("Relative path: " + path);
     }
 
     public RemoteOverlayAccessIO(String storageLocation, String driverId) throws IOException {
         super(null, null, driverId);
         this.setIsLocalFile(false);
+        configureRemoteEndpoints();
         configureStores(null, driverId, storageLocation);
 
-        urlPath = storageLocation.substring(storageLocation.lastIndexOf("//") + 2);
-        validatePath(urlPath);
-        logger.fine("Base URL: " + urlPath);
+        path = storageLocation.substring(storageLocation.lastIndexOf("//") + 2);
+        validatePath(path);
+        logger.fine("Relative path: " + path);
     }
-    
-    private void validatePath(String path) throws IOException {
+
+    protected void validatePath(String relPath) throws IOException {
         try {
-            URI absoluteURI = new URI(baseUrl + "/" + urlPath);
-            if(!absoluteURI.normalize().toString().startsWith(baseUrl)) {
+            URI absoluteURI = new URI(baseUrl + "/" + relPath);
+            if (!absoluteURI.normalize().toString().startsWith(baseUrl)) {
                 throw new IOException("storageidentifier doesn't start with " + this.driverId + "'s base-url");
             }
-        } catch(URISyntaxException use) {
+        } catch (URISyntaxException use) {
             throw new IOException("Could not interpret storageidentifier in remote store " + this.driverId);
         }
-     }
-
+    }
 
     @Override
     public void open(DataAccessOption... options) throws IOException {
@@ -150,7 +122,7 @@ public void open(DataAccessOption... options) throws IOException {
                     this.setSize(dataFile.getFilesize());
                 } else {
                     logger.fine("Setting size");
-                    this.setSize(getSizeFromHttpHeader());
+                    this.setSize(retrieveSizeFromMedia());
                 }
                 if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values")
                         && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) {
@@ -171,18 +143,17 @@ public void open(DataAccessOption... options) throws IOException {
             }
         } else if (dvObject instanceof Dataset) {
             throw new IOException(
-                    "Data Access: RemoteOverlay Storage driver does not support dvObject type Dataverse yet");
+                    "Data Access: " + this.getClass().getName() + " does not support dvObject type Dataset yet");
         } else if (dvObject instanceof Dataverse) {
             throw new IOException(
-                    "Data Access: RemoteOverlay Storage driver does not support dvObject type Dataverse yet");
-        } else {
-            this.setSize(getSizeFromHttpHeader());
+                    "Data Access: " + this.getClass().getName() + " does not support dvObject type Dataverse yet");
         }
     }
 
-    private long getSizeFromHttpHeader() {
+    @Override
+    public long retrieveSizeFromMedia() {
         long size = -1;
-        HttpHead head = new HttpHead(baseUrl + "/" + urlPath);
+        HttpHead head = new HttpHead(baseUrl + "/" + path);
         try {
             CloseableHttpResponse response = getSharedHttpClient().execute(head, localContext);
 
@@ -224,12 +195,12 @@ public InputStream getInputStream() throws IOException {
                     break;
                 default:
                     logger.warning("Response from " + get.getURI().toString() + " was " + code);
-                    throw new IOException("Cannot retrieve: " + baseUrl + "/" + urlPath + " code: " + code);
+                    throw new IOException("Cannot retrieve: " + baseUrl + "/" + path + " code: " + code);
                 }
             } catch (Exception e) {
                 logger.warning(e.getMessage());
                 e.printStackTrace();
-                throw new IOException("Error retrieving: " + baseUrl + "/" + urlPath + " " + e.getMessage());
+                throw new IOException("Error retrieving: " + baseUrl + "/" + path + " " + e.getMessage());
 
             }
             setChannel(Channels.newChannel(super.getInputStream()));
@@ -260,13 +231,13 @@ public void delete() throws IOException {
             throw new IOException("Direct Access IO must be used to permanently delete stored file objects");
         }
         try {
-            HttpDelete del = new HttpDelete(baseUrl + "/" + urlPath);
+            HttpDelete del = new HttpDelete(baseUrl + "/" + path);
             CloseableHttpResponse response = getSharedHttpClient().execute(del, localContext);
             try {
                 int code = response.getStatusLine().getStatusCode();
                 switch (code) {
                 case 200:
-                    logger.fine("Sent DELETE for " + baseUrl + "/" + urlPath);
+                    logger.fine("Sent DELETE for " + baseUrl + "/" + path);
                 default:
                     logger.fine("Response from DELETE on " + del.getURI().toString() + " was " + code);
                 }
@@ -275,7 +246,7 @@ public void delete() throws IOException {
             }
         } catch (Exception e) {
             logger.warning(e.getMessage());
-            throw new IOException("Error deleting: " + baseUrl + "/" + urlPath);
+            throw new IOException("Error deleting: " + baseUrl + "/" + path);
 
         }
 
@@ -284,104 +255,12 @@ public void delete() throws IOException {
 
     }
 
-    @Override
-    public Channel openAuxChannel(String auxItemTag, DataAccessOption... options) throws IOException {
-        return baseStore.openAuxChannel(auxItemTag, options);
-    }
-
-    @Override
-    public boolean isAuxObjectCached(String auxItemTag) throws IOException {
-        return baseStore.isAuxObjectCached(auxItemTag);
-    }
-
-    @Override
-    public long getAuxObjectSize(String auxItemTag) throws IOException {
-        return baseStore.getAuxObjectSize(auxItemTag);
-    }
-
-    @Override
-    public Path getAuxObjectAsPath(String auxItemTag) throws IOException {
-        return baseStore.getAuxObjectAsPath(auxItemTag);
-    }
-
-    @Override
-    public void backupAsAux(String auxItemTag) throws IOException {
-        baseStore.backupAsAux(auxItemTag);
-    }
-
-    @Override
-    public void revertBackupAsAux(String auxItemTag) throws IOException {
-        baseStore.revertBackupAsAux(auxItemTag);
-    }
-
-    @Override
-    // this method copies a local filesystem Path into this DataAccess Auxiliary
-    // location:
-    public void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException {
-        baseStore.savePathAsAux(fileSystemPath, auxItemTag);
-    }
-
-    @Override
-    public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException {
-        baseStore.saveInputStreamAsAux(inputStream, auxItemTag, filesize);
-    }
-
-    /**
-     * @param inputStream InputStream we want to save
-     * @param auxItemTag  String representing this Auxiliary type ("extension")
-     * @throws IOException if anything goes wrong.
-     */
-    @Override
-    public void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException {
-        baseStore.saveInputStreamAsAux(inputStream, auxItemTag);
-    }
-
-    @Override
-    public List<String> listAuxObjects() throws IOException {
-        return baseStore.listAuxObjects();
-    }
-
-    @Override
-    public void deleteAuxObject(String auxItemTag) throws IOException {
-        baseStore.deleteAuxObject(auxItemTag);
-    }
-
-    @Override
-    public void deleteAllAuxObjects() throws IOException {
-        baseStore.deleteAllAuxObjects();
-    }
-
-    @Override
-    public String getStorageLocation() throws IOException {
-        String fullStorageLocation = dvObject.getStorageIdentifier();
-        logger.fine("storageidentifier: " + fullStorageLocation);
-        int driverIndex = fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR);
-        if(driverIndex >=0) {
-          fullStorageLocation = fullStorageLocation.substring(fullStorageLocation.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length());
-        }
-        if (this.getDvObject() instanceof Dataset) {
-            throw new IOException("RemoteOverlayAccessIO: Datasets are not a supported dvObject");
-        } else if (this.getDvObject() instanceof DataFile) {
-            fullStorageLocation = StorageIO.getDriverPrefix(this.driverId) + fullStorageLocation;
-        } else if (dvObject instanceof Dataverse) {
-            throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject");
-        }
-        logger.fine("fullStorageLocation: " + fullStorageLocation);
-        return fullStorageLocation;
-    }
-
     @Override
     public Path getFileSystemPath() throws UnsupportedDataAccessOperationException {
         throw new UnsupportedDataAccessOperationException(
                 "RemoteOverlayAccessIO: this is a remote DataAccess IO object, it has no local filesystem path associated with it.");
     }
 
-    @Override
-    public boolean exists() {
-        logger.fine("Exists called");
-        return (getSizeFromHttpHeader() != -1);
-    }
-
     @Override
     public WritableByteChannel getWriteChannel() throws UnsupportedDataAccessOperationException {
         throw new UnsupportedDataAccessOperationException(
@@ -394,20 +273,15 @@ public OutputStream getOutputStream() throws UnsupportedDataAccessOperationExcep
                 "RemoteOverlayAccessIO: there are no output Streams associated with S3 objects.");
     }
 
-    @Override
-    public InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException {
-        return baseStore.getAuxFileAsInputStream(auxItemTag);
-    }
-
     @Override
     public boolean downloadRedirectEnabled() {
-        String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect");
+        String optionValue = getConfigParam(StorageIO.DOWNLOAD_REDIRECT);
         if ("true".equalsIgnoreCase(optionValue)) {
             return true;
         }
         return false;
     }
-    
+
     public boolean downloadRedirectEnabled(String auxObjectTag) {
         return baseStore.downloadRedirectEnabled(auxObjectTag);
     }
@@ -418,11 +292,11 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary
 
         // ToDo - support remote auxiliary Files
         if (auxiliaryTag == null) {
-            String secretKey = System.getProperty("dataverse.files." + this.driverId + ".secret-key");
+            String secretKey = getConfigParam(SECRET_KEY);
             if (secretKey == null) {
-                return baseUrl + "/" + urlPath;
+                return baseUrl + "/" + path;
             } else {
-                return UrlSignerUtil.signUrl(baseUrl + "/" + urlPath, getUrlExpirationMinutes(), null, "GET",
+                return UrlSignerUtil.signUrl(baseUrl + "/" + path, getUrlExpirationMinutes(), null, "GET",
                         secretKey);
             }
         } else {
@@ -430,27 +304,21 @@ public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliary
         }
     }
 
-    int getUrlExpirationMinutes() {
-        String optionValue = System.getProperty("dataverse.files." + this.driverId + ".url-expiration-minutes");
-        if (optionValue != null) {
-            Integer num;
-            try {
-                num = Integer.parseInt(optionValue);
-            } catch (NumberFormatException ex) {
-                num = null;
-            }
-            if (num != null) {
-                return num;
-            }
-        }
-        return 60;
-    }
 
-    private void configureStores(DataAccessRequest req, String driverId, String storageLocation) throws IOException {
-        baseUrl = System.getProperty("dataverse.files." + this.driverId + ".base-url");
+    /** This method configures the remote endpoint(s) the store is allowed to reference data from.
+     * At present, the RemoteOverlayAccessIO only supports a single endpoint, but the derived
+     * GlobusOverlayAccessIO can support multiple endpoints.
+     * @throws IOException
+     */
+    protected void configureRemoteEndpoints() throws IOException {
+        baseUrl = getConfigParam(BASE_URL);
         if (baseUrl == null) {
-            throw new IOException("dataverse.files." + this.driverId + ".base-url is required");
-        } else {
+            // Fall back to the first endpoint defined via the newer setting, if present
+            String endpoints = getConfigParam(REFERENCE_ENDPOINTS_WITH_BASEPATHS);
+            baseUrl = (endpoints == null) ? null : endpoints.split("\\s*,\\s*")[0];
+            if (baseUrl == null) {
+                throw new IOException("dataverse.files." + this.driverId + ".base-url is required");
+            }
+        }
+        if (baseUrl != null) {
             try {
                 new URI(baseUrl);
             } catch (Exception e) {
@@ -460,180 +328,42 @@ private void configureStores(DataAccessRequest req, String driverId, String stor
             }
 
         }
-
-        if (baseStore == null) {
-            String baseDriverId = getBaseStoreIdFor(driverId);
-            String fullStorageLocation = null;
-            String baseDriverType = System.getProperty("dataverse.files." + baseDriverId + ".type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
-            
-            if(dvObject  instanceof Dataset) {
-                baseStore = DataAccess.getStorageIO(dvObject, req, baseDriverId);
-            } else {
-                if (this.getDvObject() != null) {
-                    fullStorageLocation = getStoragePath();
-
-                    // S3 expects <id>://<bucketname>/<key>
-                    switch (baseDriverType) {
-                    case DataAccess.S3:
-                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
-                                + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/"
-                                + fullStorageLocation;
-                        break;
-                    case DataAccess.FILE:
-                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
-                                + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/"
-                                + fullStorageLocation;
-                        break;
-                    default:
-                        logger.warning("Not Implemented: RemoteOverlay store with base store type: "
-                                + System.getProperty("dataverse.files." + baseDriverId + ".type"));
-                        throw new IOException("Not implemented");
-                    }
-
-                } else if (storageLocation != null) {
-                    // <remoteDriverId>://<baseStorageIdentifier>//<baseUrlPath>
-                    //remoteDriverId:// is removed if coming through directStorageIO
-                    int index = storageLocation.indexOf(DataAccess.SEPARATOR);
-                    if(index > 0) {
-                        storageLocation = storageLocation.substring(index + DataAccess.SEPARATOR.length());
-                    }
-                    //THe base store needs the baseStoreIdentifier and not the relative URL
-                    fullStorageLocation = storageLocation.substring(0, storageLocation.indexOf("//"));
-
-                    switch (baseDriverType) {
-                    case DataAccess.S3:
-                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
-                                + System.getProperty("dataverse.files." + baseDriverId + ".bucket-name") + "/"
-                                + fullStorageLocation;
-                        break;
-                    case DataAccess.FILE:
-                        fullStorageLocation = baseDriverId + DataAccess.SEPARATOR
-                                + System.getProperty("dataverse.files." + baseDriverId + ".directory", "/tmp/files") + "/"
-                                + fullStorageLocation;
-                        break;
-                    default:
-                        logger.warning("Not Implemented: RemoteOverlay store with base store type: "
-                                + System.getProperty("dataverse.files." + baseDriverId + ".type"));
-                        throw new IOException("Not implemented");
-                    }
-                }
-                baseStore = DataAccess.getDirectStorageIO(fullStorageLocation);
-            }
-            if (baseDriverType.contentEquals(DataAccess.S3)) {
-                ((S3AccessIO<?>) baseStore).setMainDriver(false);
-            }
-        }
-        remoteStoreName = System.getProperty("dataverse.files." + this.driverId + ".remote-store-name");
-        try {
-          remoteStoreUrl = new URL(System.getProperty("dataverse.files." + this.driverId + ".remote-store-url"));
-        } catch(MalformedURLException mfue) {
-            logger.fine("Unable to read remoteStoreUrl for driver: " + this.driverId);
-        }
-    }
-
-    //Convenience method to assemble the path, starting with the DOI authority/identifier/, that is needed to create a base store via DataAccess.getDirectStorageIO - the caller has to add the store type specific prefix required.
-    private String getStoragePath() throws IOException {
-        String fullStoragePath = dvObject.getStorageIdentifier();
-        logger.fine("storageidentifier: " + fullStoragePath);
-        int driverIndex = fullStoragePath.lastIndexOf(DataAccess.SEPARATOR);
-        if(driverIndex >=0) {
-          fullStoragePath = fullStoragePath.substring(fullStoragePath.lastIndexOf(DataAccess.SEPARATOR) + DataAccess.SEPARATOR.length());
-        }
-        int suffixIndex = fullStoragePath.indexOf("//");
-        if(suffixIndex >=0) {
-          fullStoragePath = fullStoragePath.substring(0, suffixIndex);
-        }
-        if (this.getDvObject() instanceof Dataset) {
-            fullStoragePath = this.getDataset().getAuthorityForFileStorage() + "/"
-                    + this.getDataset().getIdentifierForFileStorage() + "/" + fullStoragePath;
-        } else if (this.getDvObject() instanceof DataFile) {
-            fullStoragePath = this.getDataFile().getOwner().getAuthorityForFileStorage() + "/"
-                    + this.getDataFile().getOwner().getIdentifierForFileStorage() + "/" + fullStoragePath; 
-        }else if (dvObject instanceof Dataverse) {
-            throw new IOException("RemoteOverlayAccessIO: Dataverses are not a supported dvObject");
-        }
-        logger.fine("fullStoragePath: " + fullStoragePath);
-        return fullStoragePath;
-    }
-    
-    public CloseableHttpClient getSharedHttpClient() {
-        if (httpclient == null) {
-            try {
-                initHttpPool();
-                httpclient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(config).build();
-
-            } catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException ex) {
-                logger.warning(ex.getMessage());
-            }
-        }
-        return httpclient;
-    }
-
-    private void initHttpPool() throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException {
-        if (trustCerts) {
-            // use the TrustSelfSignedStrategy to allow Self Signed Certificates
-            SSLContext sslContext;
-            SSLConnectionSocketFactory connectionFactory;
-
-            sslContext = SSLContextBuilder.create().loadTrustMaterial(new TrustAllStrategy()).build();
-            // create an SSL Socket Factory to use the SSLContext with the trust self signed
-            // certificate strategy
-            // and allow all hosts verifier.
-            connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE);
-
-            Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
-                    .register("https", connectionFactory).build();
-            cm = new PoolingHttpClientConnectionManager(registry);
-        } else {
-            cm = new PoolingHttpClientConnectionManager();
-        }
-        cm.setDefaultMaxPerRoute(httpConcurrency);
-        cm.setMaxTotal(httpConcurrency > 20 ? httpConcurrency : 20);
     }
 
     @Override
     public void savePath(Path fileSystemPath) throws IOException {
         throw new UnsupportedDataAccessOperationException(
-                "RemoteOverlayAccessIO: savePath() not implemented in this storage driver.");
+                this.getClass().getName() + ": savePath() not implemented in this storage driver.");
 
     }
 
     @Override
     public void saveInputStream(InputStream inputStream) throws IOException {
         throw new UnsupportedDataAccessOperationException(
-                "RemoteOverlayAccessIO: saveInputStream() not implemented in this storage driver.");
+                this.getClass().getName() + ": saveInputStream() not implemented in this storage driver.");
 
     }
 
     @Override
     public void saveInputStream(InputStream inputStream, Long filesize) throws IOException {
         throw new UnsupportedDataAccessOperationException(
-                "RemoteOverlayAccessIO: saveInputStream(InputStream, Long) not implemented in this storage driver.");
+                this.getClass().getName() + ": saveInputStream(InputStream, Long) not implemented in this storage driver.");
 
     }
 
-    protected static boolean isValidIdentifier(String driverId, String storageId) {
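+    // Checks that the path portion of the storageidentifier resolves to a URL under this store's
+    // configured base-url, so identifiers cannot point outside the configured endpoint.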
+    static boolean isValidIdentifier(String driverId, String storageId) {
         String urlPath = storageId.substring(storageId.lastIndexOf("//") + 2);
-        String baseUrl = System.getProperty("dataverse.files." + driverId + ".base-url");
+        String baseUrl = getConfigParamForDriver(driverId, BASE_URL);
         try {
             URI absoluteURI = new URI(baseUrl + "/" + urlPath);
-            if(!absoluteURI.normalize().toString().startsWith(baseUrl)) {
+            if (!absoluteURI.normalize().toString().startsWith(baseUrl)) {
                 logger.warning("storageidentifier doesn't start with " + driverId + "'s base-url: " + storageId);
                 return false;
             }
-        } catch(URISyntaxException use) {
+        } catch (URISyntaxException use) {
             logger.warning("Could not interpret storageidentifier in remote store " + driverId + " : " + storageId);
             return false;
         }
         return true;
     }
-
-    public static String getBaseStoreIdFor(String driverId) {
-        return System.getProperty("dataverse.files." + driverId + ".base-store");
-    }
-
-    @Override
-    public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOException {
-        return baseStore.cleanUp(filter, dryRun);
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
index f396b07d788..8afc365417e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java
@@ -4,9 +4,11 @@
 import com.amazonaws.ClientConfiguration;
 import com.amazonaws.HttpMethod;
 import com.amazonaws.SdkClientException;
+import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AWSCredentialsProviderChain;
 import com.amazonaws.auth.AWSStaticCredentialsProvider;
 import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.auth.InstanceProfileCredentialsProvider;
 import com.amazonaws.auth.profile.ProfileCredentialsProvider;
 import com.amazonaws.client.builder.AwsClientBuilder;
 import com.amazonaws.services.s3.AmazonS3;
@@ -56,9 +58,11 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Optional;
 import java.util.Random;
 import java.util.function.Predicate;
 import java.util.logging.Logger;
@@ -68,9 +72,9 @@
 import org.eclipse.microprofile.config.Config;
 import org.eclipse.microprofile.config.ConfigProvider;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.validation.constraints.NotNull;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.validation.constraints.NotNull;
 
 /**
  *
@@ -87,6 +91,16 @@ public class S3AccessIO<T extends DvObject> extends StorageIO<T> {
 
     private static final Config config = ConfigProvider.getConfig();
     private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.S3AccessIO");
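+    // Names of the S3-specific config options; each is read per store as a
+    // dataverse.files.<driverId>.<name> property via the StorageIO getConfigParam helpers.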
+    static final String URL_EXPIRATION_MINUTES = "url-expiration-minutes";
+    static final String CUSTOM_ENDPOINT_URL = "custom-endpoint-url";
+    static final String PROXY_URL = "proxy-url";
+    static final String BUCKET_NAME = "bucket-name";
+    static final String MIN_PART_SIZE = "min-part-size";
+    static final String CUSTOM_ENDPOINT_REGION = "custom-endpoint-region";
+    static final String PATH_STYLE_ACCESS = "path-style-access";
+    static final String PAYLOAD_SIGNING = "payload-signing";
+    static final String CHUNKED_ENCODING = "chunked-encoding";
+    static final String PROFILE = "profile";
     
     private boolean mainDriver = true;
 
@@ -102,19 +116,18 @@ public S3AccessIO(T dvObject, DataAccessRequest req, String driverId) {
             minPartSize = getMinPartSize(driverId);
             s3=getClient(driverId);
             tm=getTransferManager(driverId);
-            endpoint = System.getProperty("dataverse.files." + driverId + ".custom-endpoint-url", "");
-            proxy = System.getProperty("dataverse.files." + driverId + ".proxy-url", "");
+            endpoint = getConfigParam(CUSTOM_ENDPOINT_URL, "");
+            proxy = getConfigParam(PROXY_URL, "");
             if(!StringUtil.isEmpty(proxy)&&StringUtil.isEmpty(endpoint)) {
                 logger.severe(driverId + " config error: Must specify a custom-endpoint-url if proxy-url is specified");
             }
-            //Not sure this is needed but moving it from the open method for now since it definitely doesn't need to run every time an object is opened.
-            try {
-                if (bucketName == null || !s3.doesBucketExistV2(bucketName)) {
-                    throw new IOException("ERROR: S3AccessIO - You must create and configure a bucket before creating datasets.");
-                }
-            } catch (SdkClientException sce) {
-                throw new IOException("ERROR: S3AccessIO - Failed to look up bucket "+bucketName+" (is AWS properly configured?): " + sce.getMessage());
-            }
+
+            // FWIW: There used to be a check here to see if the bucket exists.
+            // It was very redundant (checking every time we access any file) and didn't do
+            // much but potentially make the failure (in the unlikely case a bucket doesn't
+            // exist/just disappeared) happen slightly earlier (here versus at the first
+            // file/metadata access).
+                    
         } catch (Exception e) {
             throw new AmazonClientException(
                         "Cannot instantiate a S3 client; check your AWS credentials and region",
@@ -190,7 +203,7 @@ public void open(DataAccessOption... options) throws IOException {
                     }
                 } // else we're OK (assumes bucket name in storageidentifier matches the driver's bucketname)
             } else {
-                if(!storageIdentifier.substring((this.driverId + DataAccess.SEPARATOR).length()).contains(":")) {
+                if(!storageIdentifier.contains(":")) {
                     //No driver id or bucket 
                     newStorageIdentifier= this.driverId + DataAccess.SEPARATOR + bucketName + ":" + storageIdentifier;
                 } else {
@@ -206,14 +219,7 @@ public void open(DataAccessOption... options) throws IOException {
 
             
             if (isReadAccess) {
-                key = getMainFileKey();
-                ObjectMetadata objectMetadata = null; 
-                try {
-                    objectMetadata = s3.getObjectMetadata(bucketName, key);
-                } catch (SdkClientException sce) {
-                    throw new IOException("Cannot get S3 object " + key + " ("+sce.getMessage()+")");
-                }
-                this.setSize(objectMetadata.getContentLength());
+                this.setSize(retrieveSizeFromMedia());
 
                 if (dataFile.getContentType() != null
                         && dataFile.getContentType().equals("text/tab-separated-values")
@@ -848,7 +854,7 @@ private static String getMainFileKey(String baseKey, String storageIdentifier, S
 
     @Override
     public boolean downloadRedirectEnabled() {
-        String optionValue = System.getProperty("dataverse.files." + this.driverId + ".download-redirect");
+        String optionValue = getConfigParam(DOWNLOAD_REDIRECT);
         if ("true".equalsIgnoreCase(optionValue)) {
             return true;
         }
@@ -1072,7 +1078,7 @@ public JsonObjectBuilder generateTemporaryS3UploadUrls(String globalId, String s
     }
     
     int getUrlExpirationMinutes() {
-        String optionValue = System.getProperty("dataverse.files." + this.driverId + ".url-expiration-minutes"); 
+        String optionValue = getConfigParam(URL_EXPIRATION_MINUTES); 
         if (optionValue != null) {
             Integer num; 
             try {
@@ -1088,7 +1094,7 @@ int getUrlExpirationMinutes() {
     }
     
     private static String getBucketName(String driverId) {
-        return System.getProperty("dataverse.files." + driverId + ".bucket-name");
+        return getConfigParamForDriver(driverId, BUCKET_NAME);
     }
     
     private static long getMinPartSize(String driverId) {
@@ -1096,7 +1102,7 @@ private static long getMinPartSize(String driverId) {
         // (minimum allowed is 5*1024**2 but it probably isn't worth the complexity starting at ~5MB. Also -  confirmed that they use base 2 definitions)
         long min = 5 * 1024 * 1024l; 
 
-        String partLength = System.getProperty("dataverse.files." + driverId + ".min-part-size");
+        String partLength = getConfigParamForDriver(driverId, MIN_PART_SIZE);
         try {
             if (partLength != null) {
                 long val = Long.parseLong(partLength);
@@ -1145,12 +1151,12 @@ private static AmazonS3 getClient(String driverId) {
              * Pass in a URL pointing to your S3 compatible storage.
              * For possible values see https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/client/builder/AwsClientBuilder.EndpointConfiguration.html
              */
-            String s3CEUrl = System.getProperty("dataverse.files." + driverId + ".custom-endpoint-url", "");
+            String s3CEUrl = getConfigParamForDriver(driverId, CUSTOM_ENDPOINT_URL, "");
             /**
              * Pass in a region to use for SigV4 signing of requests.
              * Defaults to "dataverse" as it is not relevant for custom S3 implementations.
              */
-            String s3CERegion = System.getProperty("dataverse.files." + driverId + ".custom-endpoint-region", "dataverse");
+            String s3CERegion = getConfigParamForDriver(driverId, CUSTOM_ENDPOINT_REGION, "dataverse");
 
             // if the admin has set a system property (see below) we use this endpoint URL instead of the standard ones.
             if (!s3CEUrl.isEmpty()) {
@@ -1160,7 +1166,7 @@ private static AmazonS3 getClient(String driverId) {
              * Pass in a boolean value if path style access should be used within the S3 client.
              * Anything but case-insensitive "true" will lead to value of false, which is default value, too.
              */
-            Boolean s3pathStyleAccess = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".path-style-access", "false"));
+            Boolean s3pathStyleAccess = Boolean.parseBoolean(getConfigParamForDriver(driverId, PATH_STYLE_ACCESS, "false"));
             // some custom S3 implementations require "PathStyleAccess" as they us a path, not a subdomain. default = false
             s3CB.withPathStyleAccessEnabled(s3pathStyleAccess);
 
@@ -1168,37 +1174,70 @@ private static AmazonS3 getClient(String driverId) {
              * Pass in a boolean value if payload signing should be used within the S3 client.
              * Anything but case-insensitive "true" will lead to value of false, which is default value, too.
              */
-            Boolean s3payloadSigning = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".payload-signing","false"));
+            Boolean s3payloadSigning = Boolean.parseBoolean(getConfigParamForDriver(driverId, PAYLOAD_SIGNING,"false"));
             /**
              * Pass in a boolean value if chunked encoding should not be used within the S3 client.
              * Anything but case-insensitive "false" will lead to value of true, which is default value, too.
              */
-            Boolean s3chunkedEncoding = Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".chunked-encoding","true"));
+            Boolean s3chunkedEncoding = Boolean.parseBoolean(getConfigParamForDriver(driverId, CHUNKED_ENCODING,"true"));
             // Openstack SWIFT S3 implementations require "PayloadSigning" set to true. default = false
             s3CB.setPayloadSigningEnabled(s3payloadSigning);
             // Openstack SWIFT S3 implementations require "ChunkedEncoding" set to false. default = true
             // Boolean is inverted, otherwise setting dataverse.files.<id>.chunked-encoding=false would result in leaving Chunked Encoding enabled
             s3CB.setChunkedEncodingDisabled(!s3chunkedEncoding);
 
-            /**
-             * Pass in a string value if this storage driver should use a non-default AWS S3 profile.
-             * The default is "default" which should work when only one profile exists.
+            /** Configure credentials for the S3 client. There are multiple mechanisms available.
+             * Role-based/instance credentials are defined globally, while the other mechanisms (profile, static)
+             * are defined per store. The logic below ensures that
+             * * if a store-specific profile or static credentials are explicitly set, they are used in preference to the global role-based credentials,
+             * * if store-specific role-based credentials are explicitly set, they are used in preference to the global instance credentials,
+             * * if a profile and static credentials are both explicitly set, the profile is used preferentially, and
+             * * if no store-specific credentials are set, the global credentials are preferred over any "default" profile credentials that are found.
+             */
-            String s3profile = System.getProperty("dataverse.files." + driverId + ".profile","default");
-            ProfileCredentialsProvider profileCredentials = new ProfileCredentialsProvider(s3profile);
-    
-            // Try to retrieve credentials via Microprofile Config API, too. For production use, you should not use env
-            // vars or system properties to provide these, but use the secrets config source provided by Payara.
-            AWSStaticCredentialsProvider staticCredentials = new AWSStaticCredentialsProvider(
-                new BasicAWSCredentials(
-                    config.getOptionalValue("dataverse.files." + driverId + ".access-key", String.class).orElse(""),
-                    config.getOptionalValue("dataverse.files." + driverId + ".secret-key", String.class).orElse("")
-                ));
-            
-            // Add both providers to chain - the first working provider will be used (so static credentials are the fallback)
-            AWSCredentialsProviderChain providerChain = new AWSCredentialsProviderChain(profileCredentials, staticCredentials);
+
+            ArrayList<AWSCredentialsProvider> providers = new ArrayList<>();
+
+            String s3profile = getConfigParamForDriver(driverId, PROFILE);
+            boolean allowInstanceCredentials = true;
+            // Assume that instance credentials should not be used if the profile is
+            // actually set for this store or if static creds are provided (below).
+            if (s3profile != null) {
+                allowInstanceCredentials = false;
+            }
+            // Try to retrieve credentials via Microprofile Config API, too. For production
+            // use, you should not use env vars or system properties to provide these, but 
+            // use the secrets config source provided by Payara.
+            Optional<String> accessKey = config.getOptionalValue("dataverse.files." + driverId + ".access-key", String.class);
+            Optional<String> secretKey = config.getOptionalValue("dataverse.files." + driverId + ".secret-key", String.class);
+            if (accessKey.isPresent() && secretKey.isPresent()) {
+                allowInstanceCredentials = false;
+                AWSStaticCredentialsProvider staticCredentials = new AWSStaticCredentialsProvider(
+                        new BasicAWSCredentials(
+                                accessKey.get(),
+                                secretKey.get()));
+                providers.add(staticCredentials);
+            } else if (s3profile == null) {
+                // Only fall back to the "default" profile when no profile is explicitly set for this store and no static creds are provided (otherwise the default profile would take precedence over them).
+                s3profile = "default";
+            }
+            if (s3profile != null) {
+                providers.add(new ProfileCredentialsProvider(s3profile));
+            }
+
+            if (allowInstanceCredentials) {
+                // Add role-based provider as in the default provider chain
+                providers.add(InstanceProfileCredentialsProvider.getInstance());
+            }
+            // Add all providers to the chain - the first working provider is used.
+            // Role-based credentials come first (matching the default credential provider chain,
+            // and only when no profile or static creds are explicitly set for the store),
+            // then the profile, then static credentials as the fallback.
+
+            // The desired precedence is the reverse of the order in which the providers were added, so reverse the list here
+            Collections.reverse(providers);
+            AWSCredentialsProviderChain providerChain = new AWSCredentialsProviderChain(providers);
             s3CB.setCredentials(providerChain);
-            
+
             // let's build the client :-)
             AmazonS3 client =  s3CB.build();
             driverClientMap.put(driverId,  client);
@@ -1380,4 +1419,20 @@ public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOE
         }
         return toDelete;
     }
-}
\ No newline at end of file
+
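+    // Looks up the object's metadata in S3 and returns its content length.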
+    @Override
+    public long retrieveSizeFromMedia() throws IOException {
+        key = getMainFileKey();
+        ObjectMetadata objectMetadata = null;
+        try {
+            objectMetadata = s3.getObjectMetadata(bucketName, key);
+        } catch (SdkClientException sce) {
+            throw new IOException("Cannot get S3 object " + key + " (" + sce.getMessage() + ")");
+        }
+        return objectMetadata.getContentLength();
+    }
+    
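+    // Builds a new storage identifier for this S3 store: driverId + SEPARATOR + bucketName + ":" + a generated name.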
+    public static String getNewIdentifier(String driverId) {
+        return driverId + DataAccess.SEPARATOR + getConfigParamForDriver(driverId, BUCKET_NAME) + ":" + FileUtil.generateStorageIdentifier();
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
index bfd5c5f0d8f..51cdecf64a0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java
@@ -20,12 +20,12 @@
 
 package edu.harvard.iq.dataverse.dataaccess;
 
-
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
+import edu.harvard.iq.dataverse.util.FileUtil;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -43,7 +43,6 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-
 /**
  *
  * @author Leonid Andreev
@@ -52,18 +51,27 @@
 
 public abstract class StorageIO<T extends DvObject> {
 
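+    // Common config option names, read per driver as dataverse.files.<driverId>.<name>
+    // via the getConfigParam/getConfigParamForDriver helpers defined below.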
+    static final String INGEST_SIZE_LIMIT = "ingestsizelimit";
+    static final String PUBLIC = "public";
+    static final String TYPE = "type";
+    static final String UPLOAD_REDIRECT = "upload-redirect";
+    static final String UPLOAD_OUT_OF_BAND = "upload-out-of-band";
+    protected static final String DOWNLOAD_REDIRECT = "download-redirect";
+    protected static final String DATAVERSE_INACCESSIBLE = "dataverse-inaccessible";
+
+
     public StorageIO() {
 
     }
-    
+
     public StorageIO(String storageLocation, String driverId) {
-      this.driverId=driverId;
+        this.driverId = driverId;
     }
 
     public StorageIO(T dvObject, DataAccessRequest req, String driverId) {
         this.dvObject = dvObject;
         this.req = req;
-        this.driverId=driverId;
+        this.driverId = driverId;
         if (this.req == null) {
             this.req = new DataAccessRequest();
         }
@@ -72,18 +80,19 @@ public StorageIO(T dvObject, DataAccessRequest req, String driverId) {
         }
     }
 
-    
-    
     // Abstract methods to be implemented by the storage drivers:
 
     public abstract void open(DataAccessOption... option) throws IOException;
 
     protected boolean isReadAccess = false;
     protected boolean isWriteAccess = false;
-    //A  public store is one in which files may be accessible outside Dataverse and therefore accessible without regard to Dataverse's access controls related to restriction and embargoes.
-    //Currently, this is just used to warn users at upload time rather than disable restriction/embargo. 
+    // A public store is one in which files may be accessible outside Dataverse and
+    // therefore accessible without regard to Dataverse's access controls related to
+    // restriction and embargoes.
+    // Currently, this is just used to warn users at upload time rather than disable
+    // restriction/embargo.
     static protected Map<String, Boolean> driverPublicAccessMap = new HashMap<String, Boolean>();
-    
+
     public boolean canRead() {
         return isReadAccess;
     }
@@ -94,115 +103,118 @@ public boolean canWrite() {
 
     public abstract String getStorageLocation() throws IOException;
 
-    // This method will return a Path, if the storage method is a 
-    // local filesystem. Otherwise should throw an IOException. 
+    // This method will return a Path, if the storage method is a
+    // local filesystem. Otherwise should throw an IOException.
     public abstract Path getFileSystemPath() throws IOException;
-        
-    public abstract boolean exists() throws IOException; 
-        
+
+    public abstract boolean exists() throws IOException;
+
     public abstract void delete() throws IOException;
-    
+
     // this method for copies a local Path (for ex., a
     // temp file, into this DataAccess location):
     public abstract void savePath(Path fileSystemPath) throws IOException;
-    
+
     // same, for an InputStream:
     /**
-     * This method copies a local InputStream into this DataAccess location.
-     * Note that the S3 driver implementation of this abstract method is problematic, 
-     * because S3 cannot save an object of an unknown length. This effectively 
-     * nullifies any benefits of streaming; as we cannot start saving until we 
-     * have read the entire stream. 
-     * One way of solving this would be to buffer the entire stream as byte[], 
-     * in memory, then save it... Which of course would be limited by the amount 
-     * of memory available, and thus would not work for streams larger than that. 
-     * So we have eventually decided to save save the stream to a temp file, then 
-     * save to S3. This is slower, but guaranteed to work on any size stream. 
-     * An alternative we may want to consider is to not implement this method 
-     * in the S3 driver, and make it throw the UnsupportedDataAccessOperationException, 
-     * similarly to how we handle attempts to open OutputStreams, in this and the 
-     * Swift driver. 
-     * (Not an issue in either FileAccessIO or SwiftAccessIO implementations)
+     * This method copies a local InputStream into this DataAccess location. Note
+     * that the S3 driver implementation of this abstract method is problematic,
+     * because S3 cannot save an object of an unknown length. This effectively
+     * nullifies any benefits of streaming; as we cannot start saving until we have
+     * read the entire stream. One way of solving this would be to buffer the entire
+     * stream as byte[], in memory, then save it... Which of course would be limited
+     * by the amount of memory available, and thus would not work for streams larger
+     * than that. So we eventually decided to save the stream to a temp
+     * file, then save to S3. This is slower, but guaranteed to work on any size
+     * stream. An alternative we may want to consider is to not implement this
+     * method in the S3 driver, and make it throw the
+     * UnsupportedDataAccessOperationException, similarly to how we handle attempts
+     * to open OutputStreams, in this and the Swift driver. (Not an issue in either
+     * FileAccessIO or SwiftAccessIO implementations)
      * 
      * @param inputStream InputStream we want to save
-     * @param auxItemTag String representing this Auxiliary type ("extension")
+     * @param auxItemTag  String representing this Auxiliary type ("extension")
      * @throws IOException if anything goes wrong.
-    */
+     */
     public abstract void saveInputStream(InputStream inputStream) throws IOException;
+
     public abstract void saveInputStream(InputStream inputStream, Long filesize) throws IOException;
-    
+
     // Auxiliary File Management: (new as of 4.0.2!)
-    
+
     // An "auxiliary object" is an abstraction of the traditional DVN/Dataverse
-    // mechanism of storing extra files related to the man StudyFile/DataFile - 
-    // such as "saved original" and cached format conversions for tabular files, 
-    // thumbnails for images, etc. - in physical files with the same file 
-    // name but various reserved extensions. 
-   
-    //This function retrieves auxiliary files related to datasets, and returns them as inputstream
-    public abstract InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException ;
-    
+    // mechanism of storing extra files related to the main StudyFile/DataFile -
+    // such as "saved original" and cached format conversions for tabular files,
+    // thumbnails for images, etc. - in physical files with the same file
+    // name but various reserved extensions.
+
+    // This function retrieves auxiliary files related to datasets, and returns them
+    // as an InputStream
+    public abstract InputStream getAuxFileAsInputStream(String auxItemTag) throws IOException;
+
     public abstract Channel openAuxChannel(String auxItemTag, DataAccessOption... option) throws IOException;
-    
-    public abstract long getAuxObjectSize(String auxItemTag) throws IOException; 
-    
-    public abstract Path getAuxObjectAsPath(String auxItemTag) throws IOException; 
-    
-    public abstract boolean isAuxObjectCached(String auxItemTag) throws IOException; 
-    
-    public abstract void backupAsAux(String auxItemTag) throws IOException; 
-    
-    public abstract void revertBackupAsAux(String auxItemTag) throws IOException; 
-    
-    // this method copies a local filesystem Path into this DataAccess Auxiliary location:
+
+    public abstract long getAuxObjectSize(String auxItemTag) throws IOException;
+
+    public abstract Path getAuxObjectAsPath(String auxItemTag) throws IOException;
+
+    public abstract boolean isAuxObjectCached(String auxItemTag) throws IOException;
+
+    public abstract void backupAsAux(String auxItemTag) throws IOException;
+
+    public abstract void revertBackupAsAux(String auxItemTag) throws IOException;
+
+    // this method copies a local filesystem Path into this DataAccess Auxiliary
+    // location:
     public abstract void savePathAsAux(Path fileSystemPath, String auxItemTag) throws IOException;
-    
+
     /**
-     * This method copies a local InputStream into this DataAccess Auxiliary location.
-     * Note that the S3 driver implementation of this abstract method is problematic, 
-     * because S3 cannot save an object of an unknown length. This effectively 
-     * nullifies any benefits of streaming; as we cannot start saving until we 
-     * have read the entire stream. 
-     * One way of solving this would be to buffer the entire stream as byte[], 
-     * in memory, then save it... Which of course would be limited by the amount 
-     * of memory available, and thus would not work for streams larger than that. 
-     * So we have eventually decided to save save the stream to a temp file, then 
-     * save to S3. This is slower, but guaranteed to work on any size stream. 
-     * An alternative we may want to consider is to not implement this method 
-     * in the S3 driver, and make it throw the UnsupportedDataAccessOperationException, 
-     * similarly to how we handle attempts to open OutputStreams, in this and the 
-     * Swift driver. 
-     * (Not an issue in either FileAccessIO or SwiftAccessIO implementations)
+     * This method copies a local InputStream into this DataAccess Auxiliary
+     * location. Note that the S3 driver implementation of this abstract method is
+     * problematic, because S3 cannot save an object of an unknown length. This
+     * effectively nullifies any benefits of streaming; as we cannot start saving
+     * until we have read the entire stream. One way of solving this would be to
+     * buffer the entire stream as byte[], in memory, then save it... Which of
+     * course would be limited by the amount of memory available, and thus would not
+     * work for streams larger than that. So we eventually decided to save
+     * the stream to a temp file, then save to S3. This is slower, but guaranteed to
+     * work on any size stream. An alternative we may want to consider is to not
+     * implement this method in the S3 driver, and make it throw the
+     * UnsupportedDataAccessOperationException, similarly to how we handle attempts
+     * to open OutputStreams, in this and the Swift driver. (Not an issue in either
+     * FileAccessIO or SwiftAccessIO implementations)
      * 
      * @param inputStream InputStream we want to save
-     * @param auxItemTag String representing this Auxiliary type ("extension")
+     * @param auxItemTag  String representing this Auxiliary type ("extension")
      * @throws IOException if anything goes wrong.
-    */
-    public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException; 
-    public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize) throws IOException;
-    
-    public abstract List<String>listAuxObjects() throws IOException;
-    
-    public abstract void deleteAuxObject(String auxItemTag) throws IOException; 
-    
+     */
+    public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag) throws IOException;
+
+    public abstract void saveInputStreamAsAux(InputStream inputStream, String auxItemTag, Long filesize)
+            throws IOException;
+
+    public abstract List<String> listAuxObjects() throws IOException;
+
+    public abstract void deleteAuxObject(String auxItemTag) throws IOException;
+
     public abstract void deleteAllAuxObjects() throws IOException;
 
     private DataAccessRequest req;
     private InputStream in = null;
-    private OutputStream out; 
+    private OutputStream out;
     protected Channel channel;
     protected DvObject dvObject;
     protected String driverId;
 
-    /*private int status;*/
+    /* private int status; */
     private long size;
 
     /**
-     * Where in the file to seek to when reading (default is zero bytes, the
-     * start of the file).
+     * Where in the file to seek to when reading (default is zero bytes, the start
+     * of the file).
      */
     private long offset;
-    
+
     private String mimeType;
     private String fileName;
     private String varHeader;
@@ -215,8 +227,8 @@ public boolean canWrite() {
     private String swiftContainerName;
 
     private boolean isLocalFile = false;
-    /*private boolean isRemoteAccess = false;*/
-    /*private boolean isHttpAccess = false;*/
+    /* private boolean isRemoteAccess = false; */
+    /* private boolean isHttpAccess = false; */
     private boolean noVarHeader = false;
 
     // For remote downloads:
@@ -229,13 +241,14 @@ public boolean canWrite() {
     private String remoteUrl;
     protected String remoteStoreName = null;
     protected URL remoteStoreUrl = null;
-    
+
     // For HTTP-based downloads:
-    /*private GetMethod method = null;
-    private Header[] responseHeaders;*/
+    /*
+     * private GetMethod method = null; private Header[] responseHeaders;
+     */
 
     // getters:
-    
+
     public Channel getChannel() throws IOException {
         return channel;
     }
@@ -255,16 +268,15 @@ public ReadableByteChannel getReadChannel() throws IOException {
 
         return (ReadableByteChannel) channel;
     }
-    
-    public DvObject getDvObject()
-    {
+
+    public DvObject getDvObject() {
         return dvObject;
     }
-    
+
     public DataFile getDataFile() {
         return (DataFile) dvObject;
     }
-    
+
     public Dataset getDataset() {
         return (Dataset) dvObject;
     }
@@ -277,9 +289,9 @@ public DataAccessRequest getRequest() {
         return req;
     }
 
-    /*public int getStatus() {
-        return status;
-    }*/
+    /*
+     * public int getStatus() { return status; }
+     */
 
     public long getSize() {
         return size;
@@ -292,9 +304,9 @@ public long getOffset() {
     public InputStream getInputStream() throws IOException {
         return in;
     }
-    
+
     public OutputStream getOutputStream() throws IOException {
-        return out; 
+        return out;
     }
 
     public String getMimeType() {
@@ -317,23 +329,23 @@ public String getRemoteUrl() {
         return remoteUrl;
     }
 
-    public String getTemporarySwiftUrl(){
+    public String getTemporarySwiftUrl() {
         return temporarySwiftUrl;
     }
-    
+
     public String getTempUrlExpiry() {
         return tempUrlExpiry;
     }
-    
+
     public String getTempUrlSignature() {
         return tempUrlSignature;
     }
-    
+
     public String getSwiftFileName() {
         return swiftFileName;
     }
 
-    public String getSwiftContainerName(){
+    public String getSwiftContainerName() {
         return swiftContainerName;
     }
 
@@ -344,34 +356,32 @@ public String getRemoteStoreName() {
     public URL getRemoteStoreUrl() {
         return remoteStoreUrl;
     }
-    
-    /*public GetMethod getHTTPMethod() {
-        return method;
-    }
 
-    public Header[] getResponseHeaders() {
-        return responseHeaders;
-    }*/
+    /*
+     * public GetMethod getHTTPMethod() { return method; }
+     * 
+     * public Header[] getResponseHeaders() { return responseHeaders; }
+     */
 
     public boolean isLocalFile() {
         return isLocalFile;
     }
-    
-    // "Direct Access" StorageIO is used to access a physical storage 
-    // location not associated with any dvObject. (For example, when we 
-    // are deleting a physical file left behind by a DataFile that's 
-    // already been deleted from the database). 
+
+    // "Direct Access" StorageIO is used to access a physical storage
+    // location not associated with any dvObject. (For example, when we
+    // are deleting a physical file left behind by a DataFile that's
+    // already been deleted from the database).
     public boolean isDirectAccess() {
-        return dvObject == null; 
+        return dvObject == null;
     }
 
-    /*public boolean isRemoteAccess() {
-        return isRemoteAccess;
-    }*/
+    /*
+     * public boolean isRemoteAccess() { return isRemoteAccess; }
+     */
 
-    /*public boolean isHttpAccess() {
-        return isHttpAccess;
-    }*/
+    /*
+     * public boolean isHttpAccess() { return isHttpAccess; }
+     */
 
     public boolean isDownloadSupported() {
         return isDownloadSupported;
@@ -398,9 +408,9 @@ public void setRequest(DataAccessRequest dar) {
         req = dar;
     }
 
-    /*public void setStatus(int s) {
-        status = s;
-    }*/
+    /*
+     * public void setStatus(int s) { status = s; }
+     */
 
     public void setSize(long s) {
         size = s;
@@ -421,11 +431,11 @@ public void setOffset(long offset) throws IOException {
     public void setInputStream(InputStream is) {
         in = is;
     }
-    
+
     public void setOutputStream(OutputStream os) {
-        out = os; 
-    } 
-    
+        out = os;
+    }
+
     public void setChannel(Channel c) {
         channel = c;
     }
@@ -450,45 +460,46 @@ public void setRemoteUrl(String u) {
         remoteUrl = u;
     }
 
-    public void setTemporarySwiftUrl(String u){
+    public void setTemporarySwiftUrl(String u) {
         temporarySwiftUrl = u;
     }
-    
-    public void setTempUrlExpiry(Long u){
+
+    public void setTempUrlExpiry(Long u) {
         tempUrlExpiry = String.valueOf(u);
     }
-    
+
     public void setSwiftFileName(String u) {
         swiftFileName = u;
     }
-    
-    public void setTempUrlSignature(String u){
+
+    public void setTempUrlSignature(String u) {
         tempUrlSignature = u;
     }
 
-    public void setSwiftContainerName(String u){
+    public void setSwiftContainerName(String u) {
         swiftContainerName = u;
     }
 
-    /*public void setHTTPMethod(GetMethod hm) {
-        method = hm;
-    }*/
+    /*
+     * public void setHTTPMethod(GetMethod hm) { method = hm; }
+     */
 
-    /*public void setResponseHeaders(Header[] headers) {
-        responseHeaders = headers;
-    }*/
+    /*
+     * public void setResponseHeaders(Header[] headers) { responseHeaders = headers;
+     * }
+     */
 
     public void setIsLocalFile(boolean f) {
         isLocalFile = f;
     }
 
-    /*public void setIsRemoteAccess(boolean r) {
-        isRemoteAccess = r;
-    }*/
+    /*
+     * public void setIsRemoteAccess(boolean r) { isRemoteAccess = r; }
+     */
 
-    /*public void setIsHttpAccess(boolean h) {
-        isHttpAccess = h;
-    }*/
+    /*
+     * public void setIsHttpAccess(boolean h) { isHttpAccess = h; }
+     */
 
     public void setIsDownloadSupported(boolean d) {
         isDownloadSupported = d;
@@ -506,12 +517,11 @@ public void setNoVarHeader(boolean nvh) {
         noVarHeader = nvh;
     }
 
-        // connection management methods:
-    /*public void releaseConnection() {
-        if (method != null) {
-            method.releaseConnection();
-        }
-    }*/
+    // connection management methods:
+    /*
+     * public void releaseConnection() { if (method != null) {
+     * method.releaseConnection(); } }
+     */
 
     public void closeInputStream() {
         if (in != null) {
@@ -528,7 +538,7 @@ public void closeInputStream() {
             }
         }
     }
-    
+
     public String generateVariableHeader(List<DataVariable> dvs) {
         String varHeader = null;
 
@@ -571,14 +581,14 @@ protected boolean isWriteAccessRequested(DataAccessOption... options) throws IOE
         return false;
     }
 
-	public boolean isBelowIngestSizeLimit() {
-		long limit = Long.parseLong(System.getProperty("dataverse.files." + this.driverId + ".ingestsizelimit", "-1"));
-		if(limit>0 && getSize()>limit) {
-			return false;
-		} else {
-		    return true;
-		}
-	}
+    public boolean isBelowIngestSizeLimit() {
+        long limit = Long.parseLong(getConfigParam(INGEST_SIZE_LIMIT, "-1"));
+        if (limit > 0 && getSize() > limit) {
+            return false;
+        } else {
+            return true;
+        }
+    }
 
     public boolean downloadRedirectEnabled() {
         return false;
@@ -587,35 +597,43 @@ public boolean downloadRedirectEnabled() {
     public boolean downloadRedirectEnabled(String auxObjectTag) {
         return false;
     }
-    
-    public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName) throws IOException {
+
+    public String generateTemporaryDownloadUrl(String auxiliaryTag, String auxiliaryType, String auxiliaryFileName)
+            throws IOException {
         throw new UnsupportedDataAccessOperationException("Direct download not implemented for this storage type");
     }
-    
 
     public static boolean isPublicStore(String driverId) {
-        //Read once and cache
-        if(!driverPublicAccessMap.containsKey(driverId)) {
-            driverPublicAccessMap.put(driverId, Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".public")));
+        // Read once and cache
+        if (!driverPublicAccessMap.containsKey(driverId)) {
+            driverPublicAccessMap.put(driverId,
+                    Boolean.parseBoolean(getConfigParamForDriver(driverId, PUBLIC)));
         }
         return driverPublicAccessMap.get(driverId);
     }
-    
+
     public static String getDriverPrefix(String driverId) {
-        return driverId+ DataAccess.SEPARATOR;
+        return driverId + DataAccess.SEPARATOR;
     }
-    
+
     public static boolean isDirectUploadEnabled(String driverId) {
-        return Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"));
+        return (getConfigParamForDriver(driverId, TYPE).equals(DataAccess.S3)
+                && Boolean.parseBoolean(getConfigParamForDriver(driverId, UPLOAD_REDIRECT)))
+                || Boolean.parseBoolean(getConfigParamForDriver(driverId, UPLOAD_OUT_OF_BAND));
+    }
+
+    // True by default. Stores (e.g. RemoteOverlay, Globus) can set this to false to stop attempts to read bytes.
+    public static boolean isDataverseAccessible(String driverId) {
+        return !Boolean.parseBoolean(getConfigParamForDriver(driverId, DATAVERSE_INACCESSIBLE));
     }
     
-    //Check that storageIdentifier is consistent with store's config
-    //False will prevent direct uploads
-    protected static boolean isValidIdentifier(String driverId, String storageId) {
+    // Check that storageIdentifier is consistent with store's config
+    // False will prevent direct uploads
+    static boolean isValidIdentifier(String driverId, String storageId) {
         return false;
     }
-    
-    //Utility to verify the standard UUID pattern for stored files.
+
+    // Utility to verify the standard UUID pattern for stored files.
     protected static boolean usesStandardNamePattern(String identifier) {
 
         Pattern r = Pattern.compile("^[a-f,0-9]{11}-[a-f,0-9]{12}$");
@@ -625,4 +643,44 @@ protected static boolean usesStandardNamePattern(String identifier) {
 
     public abstract List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOException;
 
+    /**
+     * A storage-type-specific mechanism for retrieving the size of a file. Intended
+     * primarily as a way to get the size before it has been recorded in the
+     * database, e.g. during direct/out-of-band transfers, but it can also be used
+     * to verify the database values.
+     *
+     * @return file size in bytes
+     * @throws IOException if the size cannot be retrieved from the storage medium
+     */
+    public abstract long retrieveSizeFromMedia() throws IOException;
+
+    /*
+     * Convenience methods to get a driver-specific parameter:
+     *
+     *  - with or without a default value
+     *  - static (by driver id) or per instance (using this.driverId)
+     */
+
+    protected String getConfigParam(String parameterName) {
+        return getConfigParam(parameterName, null);
+    }
+
+    protected String getConfigParam(String parameterName, String defaultValue) {
+        return getConfigParamForDriver(this.driverId, parameterName, defaultValue);
+    }
+
+    protected static String getConfigParamForDriver(String driverId, String parameterName) {
+        return getConfigParamForDriver(driverId, parameterName, null);
+    }
+
+    protected static String getConfigParamForDriver(String driverId, String parameterName, String defaultValue) {
+        return System.getProperty("dataverse.files." + driverId + "." + parameterName, defaultValue);
+    }
+    
+    public static String getNewIdentifier(String driverId) {
+        return driverId + DataAccess.SEPARATOR + FileUtil.generateStorageIdentifier();
+    }
+
 }
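
For illustration, a minimal sketch (not part of the patch itself) of how a StorageIO subclass could use the new driver-scoped configuration helpers; the parameter names "chunk-size" and "example-flag" are hypothetical:

    // Hypothetical fragment inside a StorageIO<T> subclass (parameter names are made up).
    // The helpers resolve JVM options of the form -Ddataverse.files.<driverId>.<parameterName>=<value>.
    protected int getChunkSize() {
        // Instance form: uses this.driverId and falls back to the supplied default.
        return Integer.parseInt(getConfigParam("chunk-size", "8192"));
    }

    protected static boolean isExampleFlagSet(String driverId) {
        // Static form: usable before a StorageIO object exists for the given driver.
        return Boolean.parseBoolean(getConfigParamForDriver(driverId, "example-flag", "false"));
    }
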
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java
index 6c84009de3e..105a60ab418 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIO.java
@@ -50,6 +50,17 @@ public class SwiftAccessIO<T extends DvObject> extends StorageIO<T> {
     private String swiftLocation; 
 
     private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.dataaccess.SwiftAccessIO");
+    private static final String IS_PUBLIC_CONTAINER = "isPublicContainer";
+    private static final String FOLDER_PATH_SEPARATOR = "folderPathSeparator";
+    private static final String DEFAULT_ENDPOINT = "defaultEndpoint";
+    private static final String TEMPORARY_URL_EXPIRY_TIME = "temporaryUrlExpiryTime";
+    private static final String AUTH_URL = "authUrl";
+    private static final String USERNAME = "username";
+    private static final String PASSWORD = "password";
+    private static final String TENANT = "tenant";
+    private static final String AUTH_TYPE = "authType";
+    private static final String HASH_KEY = "hashKey";
+    private static final String ENDPOINT = "endpoint";
 
 	public SwiftAccessIO() {
 		//Partially functional StorageIO object - constructor only for testing
@@ -70,10 +81,10 @@ public SwiftAccessIO(String swiftLocation, String driverId) {
     }
 
     private void readSettings() {
-    	isPublicContainer = Boolean.parseBoolean(System.getProperty("dataverse.files." + this.driverId + ".isPublicContainer", "true"));
-        swiftFolderPathSeparator = System.getProperty("dataverse.files." + this.driverId + ".folderPathSeparator", "_");
-        swiftDefaultEndpoint = System.getProperty("dataverse.files." + this.driverId + ".defaultEndpoint");
-        tempUrlExpires = Integer.parseInt(System.getProperty("dataverse.files." + this.driverId + ".temporaryUrlExpiryTime", "60"));
+    	isPublicContainer = Boolean.parseBoolean(getConfigParam(IS_PUBLIC_CONTAINER, "true"));
+        swiftFolderPathSeparator = getConfigParam(FOLDER_PATH_SEPARATOR, "_");
+        swiftDefaultEndpoint = getConfigParam(DEFAULT_ENDPOINT);
+        tempUrlExpires = Integer.parseInt(getConfigParam(TEMPORARY_URL_EXPIRY_TIME, "60"));
 		
 	}
 
@@ -740,12 +751,12 @@ private StoredObject openSwiftAuxFile(boolean writeAccess, String auxItemTag) th
     }
 
     Account authenticateWithSwift(String swiftEndPoint) throws IOException {
-        String swiftEndPointAuthUrl = System.getProperty("dataverse.files." + this.driverId + ".authUrl." + swiftEndPoint);
-        String swiftEndPointUsername = System.getProperty("dataverse.files." + this.driverId + ".username." + swiftEndPoint);
-        String swiftEndPointSecretKey = System.getProperty("dataverse.files." + this.driverId + ".password." + swiftEndPoint);
-        String swiftEndPointTenantName = System.getProperty("dataverse.files." + this.driverId + ".tenant." + swiftEndPoint);
-        String swiftEndPointAuthMethod = System.getProperty("dataverse.files." + this.driverId + ".authType." + swiftEndPoint);
-        String swiftEndPointTenantId = System.getProperty("dataverse.files." + this.driverId + ".tenant." + swiftEndPoint);
+        String swiftEndPointAuthUrl = getConfigParam(AUTH_URL + "." + swiftEndPoint);
+        String swiftEndPointUsername = getConfigParam(USERNAME + "." + swiftEndPoint);
+        String swiftEndPointSecretKey = getConfigParam(PASSWORD + "." + swiftEndPoint);
+        String swiftEndPointTenantName = getConfigParam(TENANT + "." + swiftEndPoint);
+        String swiftEndPointAuthMethod = getConfigParam(AUTH_TYPE + "." + swiftEndPoint);
+        String swiftEndPointTenantId = getConfigParam(TENANT + "." + swiftEndPoint);
 
         if (swiftEndPointAuthUrl == null || swiftEndPointUsername == null || swiftEndPointSecretKey == null
                 || "".equals(swiftEndPointAuthUrl) || "".equals(swiftEndPointUsername) || "".equals(swiftEndPointSecretKey)) {
@@ -814,7 +825,7 @@ private String getSwiftFileURI(StoredObject fileObject) throws IOException {
     private String hmac = null;
     public String generateTempUrlSignature(String swiftEndPoint, String containerName, String objectName, int duration) throws IOException {
         if (hmac == null || isExpiryExpired(generateTempUrlExpiry(duration, System.currentTimeMillis()), duration, System.currentTimeMillis())) {
-            String secretKey = System.getProperty("dataverse.files." + this.driverId + ".hashKey." + swiftEndPoint);
+            String secretKey = getConfigParam(HASH_KEY + "." + swiftEndPoint);
             if (secretKey == null) {
                 throw new IOException("Please input a hash key under dataverse.files." + this.driverId + ".hashKey." + swiftEndPoint);
             }
@@ -841,7 +852,7 @@ public long generateTempUrlExpiry(int duration, long currentTime) {
 
     private String temporaryUrl = null;
     private String generateTemporarySwiftUrl(String swiftEndPoint, String containerName, String objectName, int duration) throws IOException {
-        String baseUrl = System.getProperty("dataverse.files." + this.driverId + ".endpoint." + swiftEndPoint);
+        String baseUrl = getConfigParam(ENDPOINT + "." + swiftEndPoint);
         String path = "/v1/" + containerName + "/" + objectName;
         
         if (temporaryUrl == null || isExpiryExpired(generateTempUrlExpiry(duration, System.currentTimeMillis()), duration, System.currentTimeMillis())) {
@@ -954,4 +965,9 @@ public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOE
         }
         return toDelete;
     }
+
+    @Override
+    public long retrieveSizeFromMedia() throws IOException {
+        throw new UnsupportedDataAccessOperationException("SwiftAccessIO: this method is not supported in this DataAccess driver.");
+    }
 }
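
For illustration, the settings read by readSettings() and authenticateWithSwift() above resolve to JVM options under dataverse.files.<driverId>; the endpoint-specific ones are further suffixed with the endpoint name. A sketch for a hypothetical store with driver id "swift" and an endpoint named "endpoint1" (all values are placeholders):

    -Ddataverse.files.swift.defaultEndpoint=endpoint1
    -Ddataverse.files.swift.authUrl.endpoint1=https://swift.example.edu/auth/v1.0
    -Ddataverse.files.swift.username.endpoint1=<username>
    -Ddataverse.files.swift.password.endpoint1=<secret>
    -Ddataverse.files.swift.tenant.endpoint1=<tenant>
    -Ddataverse.files.swift.authType.endpoint1=<auth type>
    -Ddataverse.files.swift.hashKey.endpoint1=<key used to sign temporary URLs>
    -Ddataverse.files.swift.endpoint.endpoint1=https://swift.example.edu
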
diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java
index c33b4e0fc71..bf5d4a0d6ab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBean.java
@@ -6,8 +6,8 @@
 import com.mashape.unirest.http.Unirest;
 import com.mashape.unirest.http.exceptions.UnirestException;
 import java.io.Serializable;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 
 /**
  * This class contains all the methods that have external runtime dependencies
diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
index 1aa384d205e..460e4727afc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
@@ -8,9 +8,9 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.Arrays;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 public class DataCaptureModuleUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
index f1785a42098..ccf861ebdc8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java
@@ -7,7 +7,10 @@
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT;
 import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
@@ -44,6 +47,7 @@
 public class DatasetUtil {
 
     private static final Logger logger = Logger.getLogger(DatasetUtil.class.getCanonicalName());
+    public static final String datasetDefaultSummaryFieldNames = "dsDescription,subject,keyword,publication,notesText";
     public static String datasetLogoFilenameFinal = "dataset_logo_original";
     public static String datasetLogoThumbnail = "dataset_logo";
     public static String thumbExtension = ".thumb";
@@ -407,6 +411,69 @@ public static InputStream getThumbnailAsInputStream(Dataset dataset, int size) {
             return nonDefaultDatasetThumbnail;
         }
     }
+    
+    public static InputStream getLogoAsInputStream(Dataset dataset) {
+        if (dataset == null) {
+            return null;
+        }
+        StorageIO<Dataset> dataAccess = null;
+
+        try {
+            dataAccess = DataAccess.getStorageIO(dataset);
+        } catch (IOException ioex) {
+            logger.warning("getLogo(): Failed to initialize dataset StorageIO for " + dataset.getStorageIdentifier()
+                    + " (" + ioex.getMessage() + ")");
+        }
+
+        InputStream in = null;
+        try {
+            if (dataAccess == null) {
+                logger.warning(
+                        "getLogo(): Failed to initialize dataset StorageIO for " + dataset.getStorageIdentifier());
+            } else {
+                in = dataAccess.getAuxFileAsInputStream(datasetLogoFilenameFinal);
+            }
+        } catch (IOException ex) {
+            logger.fine(
+                    "Dataset-level thumbnail file does not exist, or failed to open; will try to find an image file that can be used as the thumbnail.");
+        }
+
+        if (in == null) {
+            DataFile thumbnailFile = dataset.getThumbnailFile();
+
+            if (thumbnailFile == null) {
+                if (dataset.isUseGenericThumbnail()) {
+                    logger.fine("Dataset (id :" + dataset.getId() + ") does not have a logo and is 'Use Generic'.");
+                    return null;
+                } else {
+                    thumbnailFile = attemptToAutomaticallySelectThumbnailFromDataFiles(dataset, null);
+                    if (thumbnailFile == null) {
+                        logger.fine("Dataset (id :" + dataset.getId()
+                                + ") does not have a logo available that could be selected automatically.");
+                        return null;
+                    } else {
+
+                    }
+                }
+            }
+            if (thumbnailFile.isRestricted()) {
+                logger.fine("Dataset (id :" + dataset.getId()
+                        + ") has a logo the user selected but the file must have later been restricted. Returning null.");
+                return null;
+            }
+
+            try {
+                in = ImageThumbConverter.getImageThumbnailAsInputStream(thumbnailFile.getStorageIO(),
+                        ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE).getInputStream();
+            } catch (IOException ioex) {
+                logger.warning("getLogo(): Failed to get logo from DataFile for " + dataset.getStorageIdentifier()
+                        + " (" + ioex.getMessage() + ")");
+                ioex.printStackTrace();
+            }
+
+        }
+        return in;
+    }
 
     /**
      * The dataset logo is the file that a user uploads which is *not* one of
@@ -429,32 +496,33 @@ public static boolean isDatasetLogoPresent(Dataset dataset, int size) {
         return false;
     }
 
-    public static List<DatasetField> getDatasetSummaryFields(DatasetVersion datasetVersion, String customFields) {
-        
-        List<DatasetField> datasetFields = new ArrayList<>();
-        
-        //if customFields are empty, go with default fields. 
-        if(customFields==null || customFields.isEmpty()){
-               customFields="dsDescription,subject,keyword,publication,notesText";
-        }
-        
-        String[] customFieldList= customFields.split(",");
-        Map<String,DatasetField> DatasetFieldsSet=new HashMap<>(); 
-        
+    public static List<DatasetField> getDatasetSummaryFields(DatasetVersion datasetVersion, String customFieldNames) {
+        Map<String, DatasetField> datasetFieldsSet = new HashMap<>();
         for (DatasetField dsf : datasetVersion.getFlatDatasetFields()) {
-            DatasetFieldsSet.put(dsf.getDatasetFieldType().getName(),dsf); 
+            datasetFieldsSet.put(dsf.getDatasetFieldType().getName(), dsf);
+        }
+        String[] summaryFieldNames = getDatasetSummaryFieldNames(customFieldNames);
+        List<DatasetField> datasetSummaryFields = new ArrayList<>();
+        for (String summaryFieldName : summaryFieldNames) {
+            DatasetField df = datasetFieldsSet.get(summaryFieldName);
+            if (df != null) {
+                datasetSummaryFields.add(df);
+            }
         }
-        
-        for(String cfl : customFieldList)
-        {
-                DatasetField df = DatasetFieldsSet.get(cfl);
-                if(df!=null)
-                datasetFields.add(df);
+        return datasetSummaryFields;
+    }
+
+    public static String[] getDatasetSummaryFieldNames(String customFieldNames) {
+        String summaryFieldNames;
+        // If the custom fields are empty, go with the default fields.
+        if (customFieldNames == null || customFieldNames.isEmpty()) {
+            summaryFieldNames = datasetDefaultSummaryFieldNames;
+        } else {
+            summaryFieldNames = customFieldNames;
         }
-            
-        return datasetFields;
+        return summaryFieldNames.split(",");
     }
-    
+
     public static boolean isRsyncAppropriateStorageDriver(Dataset dataset){
         // ToDo - rsync was written before multiple store support and currently is hardcoded to use the DataAccess.S3 store.
         // When those restrictions are lifted/rsync can be configured per store, this test should check that setting
@@ -495,18 +563,34 @@ public static Long getDownloadSizeNumericBySelectedFiles(List<FileMetadata> file
     
     public static boolean validateDatasetMetadataExternally(Dataset ds, String executable, DataverseRequest request) {
         String sourceAddressLabel = "0.0.0.0"; 
+        String userIdentifier = "guest";
         
         if (request != null) {
             IpAddress sourceAddress = request.getSourceAddress();
             if (sourceAddress != null) {
                 sourceAddressLabel = sourceAddress.toString();
             }
+            
+            AuthenticatedUser user = request.getAuthenticatedUser();
+            
+            if (user != null) {
+                userIdentifier = user.getUserIdentifier();
+            }
         }
         
         String jsonMetadata; 
         
+        // We are sending the dataset metadata encoded in our standard JSON
+        // format, with a few extra elements added - the source address, the
+        // user identifier, and the alias of the home collection - to make it
+        // easier for the external filter to whitelist by these attributes.
+
         try {
-            jsonMetadata = json(ds).add("datasetVersion", json(ds.getLatestVersion())).add("sourceAddress", sourceAddressLabel).build().toString();
+            jsonMetadata = json(ds).add("datasetVersion", json(ds.getLatestVersion(), true))
+                    .add("sourceAddress", sourceAddressLabel)
+                    .add("userIdentifier", userIdentifier)
+                    .add("parentAlias", ds.getOwner().getAlias())
+                    .build().toString();
         } catch (Exception ex) {
             logger.warning("Failed to export dataset metadata as json; "+ex.getMessage() == null ? "" : ex.getMessage());
             return false; 
@@ -522,7 +606,7 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu
         try {
             File tempFile = File.createTempFile("datasetMetadataCheck", ".tmp");
             FileUtils.writeStringToFile(tempFile, jsonMetadata);
-                                    
+            
             // run the external executable: 
             String[] params = { executable, tempFile.getAbsolutePath() };
             Process p = Runtime.getRuntime().exec(params);
@@ -548,6 +632,10 @@ public static License getLicense(DatasetVersion dsv) {
 
     public static String getLicenseName(DatasetVersion dsv) {
         License license = DatasetUtil.getLicense(dsv);
+        return getLocalizedLicenseName(license);
+    }
+    
+    public static String getLocalizedLicenseName(License license) {
         return license != null ? getLocalizedLicenseDetails(license,"NAME")
                 : BundleUtil.getStringFromBundle("license.custom");
     }
@@ -557,10 +645,10 @@ public static String getLicenseURI(DatasetVersion dsv) {
         // Return the URI
         // For standard licenses, just return the stored URI
         return (license != null) ? license.getUri().toString()
-                // For custom terms, construct a URI with :draft or the version number in the URI
+                // For custom terms, construct a URI containing either the draft version constant or the version number
                 : (dsv.getVersionState().name().equals("DRAFT")
                         ? dsv.getDataverseSiteUrl()
-                                + "/api/datasets/:persistentId/versions/:draft/customlicense?persistentId="
+                                + "/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "/customlicense?persistentId="
                                 + dsv.getDataset().getGlobalId().asString()
                         : dsv.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" + dsv.getVersionNumber()
                                 + "." + dsv.getMinorVersionNumber() + "/customlicense?persistentId="
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index 1d0ec0f19d9..0143fced87c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -15,8 +15,8 @@
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.api.ApiConstants;
 import edu.harvard.iq.dataverse.api.Util;
-import edu.harvard.iq.dataverse.api.Files;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
@@ -27,7 +27,6 @@
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import edu.harvard.iq.dataverse.util.json.JsonPrinter;
@@ -46,24 +45,24 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.json.JsonArray;
-import javax.json.JsonObjectBuilder;
-import javax.validation.ConstraintViolation;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
+
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJBException;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.validation.ConstraintViolation;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
 
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 import org.apache.commons.io.IOUtils;
-import org.ocpsoft.common.util.Strings;
-
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_ERROR;
-import static edu.harvard.iq.dataverse.api.AbstractApiBean.STATUS_OK;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDataFilesCommand;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 
 /**
  *  Methods to add or replace a single file.
@@ -644,7 +643,7 @@ private boolean runAddReplacePhase1(Dataset owner,
                 df.setRootDataFileId(fileToReplace.getRootDataFileId());
             }
             // Reuse any file PID during a replace operation (if File PIDs are in use)
-            if (systemConfig.isFilePIDsEnabled()) {
+            if (systemConfig.isFilePIDsEnabledForCollection(owner.getOwner())) {
                 df.setGlobalId(fileToReplace.getGlobalId());
                 df.setGlobalIdCreateTime(fileToReplace.getGlobalIdCreateTime());
                 // Should be true or fileToReplace wouldn't have an identifier (since it's not
@@ -1205,18 +1204,25 @@ private boolean step_030_createNewFilesViaIngest(){
             clone = workingVersion.cloneDatasetVersion();
         }
         try {
-            CreateDataFileResult result = FileUtil.createDataFiles(workingVersion,
+            /*CreateDataFileResult result = FileUtil.createDataFiles(workingVersion,
                     this.newFileInputStream,
                     this.newFileName,
                     this.newFileContentType,
                     this.newStorageIdentifier,
                     this.newCheckSum,
                     this.newCheckSumType,
-                    this.systemConfig);
-            initialFileList = result.getDataFiles();
+                    this.systemConfig);*/
+            
+            UploadSessionQuotaLimit quota = null; 
+            if (systemConfig.isStorageQuotasEnforced()) {
+                quota = fileService.getUploadSessionQuotaLimit(dataset);
+            }
+            Command<CreateDataFileResult> cmd = new CreateNewDataFilesCommand(dvRequest, workingVersion, newFileInputStream, newFileName, newFileContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType);
+            CreateDataFileResult createDataFilesResult = commandEngine.submit(cmd);
+            initialFileList = createDataFilesResult.getDataFiles();
 
-        } catch (IOException ex) {
-            if (!Strings.isNullOrEmpty(ex.getMessage())) {
+        } catch (CommandException ex) {
+            if (ex.getMessage() != null && !ex.getMessage().isEmpty()) {
                 this.addErrorSevere(getBundleErr("ingest_create_file_err") + " " + ex.getMessage());
             } else {
                 this.addErrorSevere(getBundleErr("ingest_create_file_err"));
@@ -1929,11 +1935,6 @@ private boolean step_100_startIngestJobs(){
         //
         finalFileList.clear();
 
-        // TODO: Need to run ingwest async......
-        //if (true){
-            //return true;
-        //}
-
         if (!multifile) {
             msg("pre ingest start");
             // start the ingest!
@@ -1942,7 +1943,6 @@ private boolean step_100_startIngestJobs(){
         }
         return true;
     }
-
     
     private void msg(String m){
         logger.fine(m);
@@ -2157,7 +2157,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) {
 
             }
         }
-        catch ( javax.json.stream.JsonParsingException ex) {
+        catch ( jakarta.json.stream.JsonParsingException ex) {
             ex.printStackTrace();
             return error(BAD_REQUEST, "Json Parsing Exception :" + ex.getMessage());
         }
@@ -2172,7 +2172,7 @@ public Response addFiles(String jsonData, Dataset dataset, User authUser) {
 
 
         return Response.ok().entity(Json.createObjectBuilder()
-                .add("status", STATUS_OK)
+                .add("status", ApiConstants.STATUS_OK)
                 .add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build();
     }
     
@@ -2326,7 +2326,7 @@ public Response replaceFiles(String jsonData, Dataset ds, User authUser) {
 
             }
         }
-        catch ( javax.json.stream.JsonParsingException ex) {
+        catch ( jakarta.json.stream.JsonParsingException ex) {
             ex.printStackTrace();
             return error(BAD_REQUEST, "Json Parsing Exception :" + ex.getMessage());
         }
@@ -2340,14 +2340,14 @@ public Response replaceFiles(String jsonData, Dataset ds, User authUser) {
                 .add("Number of files successfully replaced", successNumberofFiles);
 
         return Response.ok().entity(Json.createObjectBuilder()
-                .add("status", STATUS_OK)
+                .add("status", ApiConstants.STATUS_OK)
                 .add("data", Json.createObjectBuilder().add("Files", jarr).add("Result", result)).build() ).build();
     }
 
     protected static Response error(Response.Status sts, String msg ) {
         return Response.status(sts)
                 .entity( NullSafeJsonBuilder.jsonObjectBuilder()
-                        .add("status", STATUS_ERROR)
+                        .add("status", ApiConstants.STATUS_ERROR)
                         .add( "message", msg ).build()
                 ).type(MediaType.APPLICATION_JSON_TYPE).build();
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionInfo.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionInfo.java
index c967de249ac..ca8775593b8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionInfo.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/FileVersionInfo.java
@@ -16,6 +16,7 @@
  * 
  * @author rmp553
  */
+// TODO: is this class still used anywhere?
 public class FileVersionInfo {
     
     private Long id;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java
index 5e03899b790..a9b1694d842 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/CategoryMetadata.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.datavariable;
 
-import javax.persistence.Index;
-import javax.persistence.Entity;
-import javax.persistence.Table;
-import javax.persistence.GenerationType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.JoinColumn;
+import jakarta.persistence.Index;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Table;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.JoinColumn;
 
 @Entity
 @Table(indexes = {@Index(columnList="category_id"), @Index(columnList="variablemetadata_id")})
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java
index 6462f690cac..b2e9441a163 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/DataVariable.java
@@ -8,22 +8,22 @@
 
 import java.io.Serializable;
 import java.util.Collection;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToMany;
 import org.hibernate.validator.constraints.NotBlank;
 import edu.harvard.iq.dataverse.DataTable;
 import java.util.ArrayList;
 import java.util.List;
-import javax.persistence.Column;
-import javax.persistence.Index;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Index;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java
index bf81aff3e2b..6896ef360ce 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/SummaryStatistic.java
@@ -7,14 +7,14 @@
 package edu.harvard.iq.dataverse.datavariable;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /*
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java
index 242110e333f..b52c76930d7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VarGroup.java
@@ -1,14 +1,14 @@
 package edu.harvard.iq.dataverse.datavariable;
 
-import javax.persistence.Entity;
-import javax.persistence.Table;
-import javax.persistence.Index;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.JoinColumn;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Table;
+import jakarta.persistence.Index;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Column;
 import java.util.HashSet;
 import java.util.Set;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java
index 6a3e702a561..5ccef82b5d1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableCategory.java
@@ -8,16 +8,16 @@
 
 import java.io.Serializable;
 import java.util.List;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
 
 import edu.harvard.iq.dataverse.util.AlphaNumericComparator;
-import javax.persistence.Index;
-import javax.persistence.Table;
+import jakarta.persistence.Index;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
index c18355c9979..29e821c28a4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableMetadata.java
@@ -2,18 +2,18 @@
 
 import java.io.Serializable;
 
-import javax.persistence.Entity;
-import javax.persistence.Table;
-import javax.persistence.Index;
-import javax.persistence.UniqueConstraint;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.JoinColumn;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Column;
-import javax.persistence.OneToMany;
-import javax.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Table;
+import jakarta.persistence.Index;
+import jakarta.persistence.UniqueConstraint;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Column;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.CascadeType;
 
 import java.util.Collection;
 import java.util.ArrayList;
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java
index 17098e6af54..eb04eac846b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRange.java
@@ -7,14 +7,14 @@
 package edu.harvard.iq.dataverse.datavariable;
 
 import java.io.Serializable;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java
index 81db4225515..d5f99f7e016 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableRangeItem.java
@@ -8,14 +8,14 @@
 
 import java.io.Serializable;
 import java.math.BigDecimal;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
index 8287d1c7041..9fb4a3fd34c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datavariable/VariableServiceBean.java
@@ -7,12 +7,12 @@
 package edu.harvard.iq.dataverse.datavariable;
 
 import java.util.List;
-import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -105,7 +105,7 @@ public VariableFormatType findVariableFormatTypeByName(String name) {
         VariableFormatType type = null;
         try {
             type = (VariableFormatType)query.getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             // DO nothing, just return null.
         }
         return type;
@@ -116,7 +116,7 @@ public VariableIntervalType findVariableIntervalTypeByName(String name) {
         VariableIntervalType type = null;
         try {
             type=(VariableIntervalType)em.createQuery(query).getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             // DO nothing, just return null.
         }
         return type;
@@ -127,7 +127,7 @@ public SummaryStatisticType findSummaryStatisticTypeByName(String name) {
         SummaryStatisticType type = null;
         try {
             type = (SummaryStatisticType) em.createQuery(query).getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
             // DO nothing, just return null.
         }
         return type;
diff --git a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java
index 04e7d56fad6..f45a9058e7c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/dataverse/DataverseUtil.java
@@ -4,6 +4,7 @@
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObjectContainer;
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -15,7 +16,7 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.ws.rs.BadRequestException;
+import jakarta.ws.rs.BadRequestException;
 
 import opennlp.tools.util.StringUtil;
 import org.apache.commons.io.FileUtils;
@@ -37,16 +38,32 @@ public static boolean validateDataverseMetadataExternally(Dataverse dv, String e
         String jsonMetadata;
 
         String sourceAddressLabel = "0.0.0.0";
+        String userIdentifier = "guest";
+        String parentAlias = dv.getOwner() == null ? "" : dv.getOwner().getAlias();
 
         if (request != null) {
             IpAddress sourceAddress = request.getSourceAddress();
             if (sourceAddress != null) {
                 sourceAddressLabel = sourceAddress.toString();
             }
+            AuthenticatedUser user = request.getAuthenticatedUser();
+            
+            if (user != null) {
+                userIdentifier = user.getUserIdentifier();
+            }
         }
-
+
+        // We are sending the collection metadata encoded in our standard JSON
+        // format, with a few extra elements added - the source address, the
+        // identifier of the user sending the request, and the alias of the
+        // parent collection - to make it easier for the external filter to
+        // manage whitelisting.
+
         try {
-            jsonMetadata = json(dv).add("sourceAddress", sourceAddressLabel).build().toString();
+            jsonMetadata = json(dv)
+                    .add("sourceAddress", sourceAddressLabel)
+                    .add("userIdentifier", userIdentifier)
+                    .add("parentAlias", parentAlias)
+                    .build().toString();
         } catch (Exception ex) {
             logger.warning(
                     "Failed to export dataverse metadata as json; " + ex.getMessage() == null ? "" : ex.getMessage());
@@ -87,7 +104,7 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map<String
         // :MetadataLanguage setting is not set
         // Must send UNDEFINED or match parent
         if (mLangMap.isEmpty()) {
-            if (!(ds.getMetadataLanguage().equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)
+            if (!(ds.getMetadataLanguage().equals(DvObjectContainer.UNDEFINED_CODE)
                     || ds.getMetadataLanguage().equals(owner.getMetadataLanguage()))) {
                 throw new BadRequestException("This repository is not configured to support metadataLanguage.");
             }
@@ -96,8 +113,8 @@ public static void checkMetadataLangauge(Dataset ds, Dataverse owner, Map<String
             // parent collection choice, or, if that is undefined, be one of the choices
             // allowed by the setting
             if (!((ds.getMetadataLanguage().equals(owner.getMetadataLanguage())
-                    && !owner.getMetadataLanguage().equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE))
-                    || (owner.getMetadataLanguage().equals(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE)
+                    && !owner.getMetadataLanguage().equals(DvObjectContainer.UNDEFINED_CODE))
+                    || (owner.getMetadataLanguage().equals(DvObjectContainer.UNDEFINED_CODE)
                             && (mLangMap.containsKey(ds.getMetadataLanguage()))))) {
                 throw new BadRequestException("Specified metadatalanguage ( metadataLanguage, "
                         + JsonLDTerm.schemaOrg("inLanguage").getUrl() + ") not allowed in this collection.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
index 8e555d5f7a2..f74c1222bb0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/CommandContext.java
@@ -33,16 +33,17 @@
 import edu.harvard.iq.dataverse.engine.DataverseEngine;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.search.IndexBatchServiceBean;
 import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
-import java.util.List;
 import java.util.Stack;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 
 /**
  * An interface for accessing Dataverse's resources, user info etc. Used by the
@@ -107,6 +108,8 @@ public interface CommandContext {
 
     public HandlenetServiceBean handleNet();
 
+    public PermaLinkPidProviderServiceBean permaLinkProvider();
+
     public GuestbookServiceBean guestbooks();
 
     public GuestbookResponseServiceBean responses();
@@ -124,6 +127,8 @@ public interface CommandContext {
     public UserNotificationServiceBean notifications();
 
     public AuthenticationServiceBean authentication();
+    
+    public StorageUseServiceBean storageUse();
 
     public SystemConfig systemConfig();
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
index f0cba005a4a..d792b616a0c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/DataverseRequest.java
@@ -1,16 +1,16 @@
 package edu.harvard.iq.dataverse.engine.command;
 
 import edu.harvard.iq.dataverse.api.AbstractApiBean;
-import edu.harvard.iq.dataverse.api.batchjob.FileRecordJobResource;
 import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import java.util.Arrays;
 import java.util.Enumeration;
 import java.util.HashSet;
+import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * 
@@ -24,9 +24,12 @@ public class DataverseRequest {
     private final User user;
     private final IpAddress sourceAddress;
     private final String invocationId;
+    private final HttpServletRequest httpServletRequest;
     
     private final static String undefined = "0.0.0.0";
     
+    private static final String MDKEY_PREFIX = "mdkey.";
+    
     private static final Logger logger = Logger.getLogger(DataverseRequest.class.getName());
     
     private static String headerToUse = null;
@@ -55,11 +58,12 @@ public class DataverseRequest {
     
     public DataverseRequest(User aUser, HttpServletRequest aHttpServletRequest) {
         this.user = aUser;
-
+        httpServletRequest = aHttpServletRequest;
+        
         IpAddress address = null;
 
         if (aHttpServletRequest != null) {
-
+           
             if (headerToUse != null) {
                 /*
                  * The optional case of using a header to determine the IP address is discussed
@@ -151,6 +155,7 @@ public DataverseRequest( User aUser, IpAddress aSourceAddress ) {
         user = aUser;
         sourceAddress = aSourceAddress;
         invocationId=null;
+        httpServletRequest=null;
     }
     
     public User getUser() {
@@ -187,4 +192,21 @@ public String getWFInvocationId() {
         return invocationId;
     }
     
+    public HttpServletRequest getHttpServletRequest() {
+        return httpServletRequest;
+    }
+    
+    public String getSystemMetadataBlockKeyFor(String blockName) {
+        String key = null;
+        if (httpServletRequest != null) {
+            key = httpServletRequest.getHeader(MDKEY_PREFIX + blockName);
+            logger.log(Level.FINE, ((key == null) ? "Didn't find" : "Found") + " system metadata block key for " + blockName + " in header");
+            if (key == null) {
+                key = httpServletRequest.getParameter(MDKEY_PREFIX + blockName);
+                logger.log(Level.FINE, ((key == null) ? "Didn't find" : "Found") + " system metadata block key for " + blockName + " in query parameter");
+            }
+        }
+        return key;
+    }
+    
 }
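
For illustration, a sketch of how an API client might supply the per-block key that getSystemMetadataBlockKeyFor() looks up (first in a request header, then in a query parameter). The block name "citation", the endpoint, and the key value are examples only; the expected key itself is configured server-side (see JvmSettings.MDB_SYSTEM_KEY_FOR in AbstractDatasetCommand):

    // Hypothetical JAX-RS client call; serverUrl, pid, apiToken, suppliedKey, and
    // updatedVersionJson are placeholders.
    Client client = ClientBuilder.newClient();
    Response r = client.target(serverUrl + "/api/datasets/:persistentId/versions/:draft")
            .queryParam("persistentId", pid)
            .request()
            .header("X-Dataverse-key", apiToken)
            .header("mdkey.citation", suppliedKey)   // or append ?mdkey.citation=<key> as a query parameter
            .put(Entity.json(updatedVersionJson));
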
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java
index 1465cbd74e2..303d8e1c25f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java
@@ -30,15 +30,23 @@ public abstract class AbstractCreateDatasetCommand extends AbstractDatasetComman
     
     private static final Logger logger = Logger.getLogger(AbstractCreateDatasetCommand.class.getCanonicalName());
     
-    final protected boolean registrationRequired;
+    final protected boolean harvested;
+    final protected boolean validate;
     
     public AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest) {
         this(theDataset, aRequest, false);
     }
 
-    public AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean isRegistrationRequired) {
+    public AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean isHarvested) {
         super(aRequest, theDataset);
-        registrationRequired = isRegistrationRequired;
+        harvested = isHarvested;
+        this.validate = true;
+    }
+
+    public AbstractCreateDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean isHarvested, boolean validate) {
+        super(aRequest, theDataset);
+        harvested = isHarvested;
+        this.validate = validate;
     }
    
     protected void additionalParameterTests(CommandContext ctxt) throws CommandException {
@@ -75,13 +83,17 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         Dataset theDataset = getDataset();
         GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
         if ( isEmpty(theDataset.getIdentifier()) ) {
-            theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, idServiceBean));
+            theDataset.setIdentifier(idServiceBean.generateDatasetIdentifier(theDataset));
         }
         
         DatasetVersion dsv = getVersionToPersist(theDataset);
         // This re-uses the state setup logic of CreateDatasetVersionCommand, but
         // without persisting the new version, or altering its files. 
-        new CreateDatasetVersionCommand(getRequest(), theDataset, dsv).prepareDatasetAndVersion();
+        new CreateDatasetVersionCommand(getRequest(), theDataset, dsv, validate).prepareDatasetAndVersion();
+        
+        if (!harvested) {
+            checkSystemMetadataKeyIfNeeded(dsv, null);
+        }
         
         theDataset.setCreator((AuthenticatedUser) getRequest().getUser());
         
@@ -105,15 +117,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         	theDataset.setStorageIdentifier(driverId  + DataAccess.SEPARATOR + theDataset.getAuthorityForFileStorage() + "/" + theDataset.getIdentifierForFileStorage());
         }
         if (theDataset.getIdentifier()==null) {
-            theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, idServiceBean));
+            theDataset.setIdentifier(idServiceBean.generateDatasetIdentifier(theDataset));
         }
         
         // Attempt the registration if importing dataset through the API, or the app (but not harvest)
         handlePid(theDataset, ctxt);
-                
-        if (registrationRequired && (theDataset.getGlobalIdCreateTime() == null)) {
-            throw new CommandExecutionException("Dataset could not be created.  Registration failed", this);
-        }
         
         ctxt.em().persist(theDataset);
         
@@ -131,16 +139,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         //Use for code that requires database ids
         postDBFlush(theDataset, ctxt);
         
-        // TODO: this needs to be moved in to an onSuccess method; not adding to this PR as its out of scope
-        // TODO: switch to asynchronous version when JPA sync works
-        // ctxt.index().asyncIndexDataset(theDataset.getId(), true); 
-        try{
-              ctxt.index().indexDataset(theDataset, true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post create dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + theDataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, theDataset);
-        }
+        ctxt.index().asyncIndexDataset(theDataset, true);
                  
         return theDataset;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
index f3b75d23c63..6061461306d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommand.java
@@ -3,6 +3,7 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DatasetVersionDifference;
 import edu.harvard.iq.dataverse.DatasetVersionUser;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -13,16 +14,19 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import static java.util.stream.Collectors.joining;
-import javax.validation.ConstraintViolation;
+
+import jakarta.validation.ConstraintViolation;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
-import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 
 /**
  *
@@ -150,19 +154,16 @@ protected void registerExternalIdentifier(Dataset theDataset, CommandContext ctx
         if (!theDataset.isIdentifierRegistered()) {
             GlobalIdServiceBean globalIdServiceBean = GlobalIdServiceBean.getBean(theDataset.getProtocol(), ctxt);
             if ( globalIdServiceBean != null ) {
-                if (globalIdServiceBean instanceof FakePidProviderServiceBean) {
-                    retry=false; //No reason to allow a retry with the FakeProvider, so set false for efficiency
-                }
                 try {
-                    if (globalIdServiceBean.alreadyExists(theDataset)) {
+                    if (globalIdServiceBean.alreadyRegistered(theDataset)) {
                         int attempts = 0;
                         if(retry) {
                             do  {
-                                theDataset.setIdentifier(ctxt.datasets().generateDatasetIdentifier(theDataset, globalIdServiceBean));
+                                theDataset.setIdentifier(globalIdServiceBean.generateDatasetIdentifier(theDataset));
                                 logger.log(Level.INFO, "Attempting to register external identifier for dataset {0} (trying: {1}).",
                                     new Object[]{theDataset.getId(), theDataset.getIdentifier()});
                                 attempts++;
-                            } while (globalIdServiceBean.alreadyExists(theDataset) && attempts <= FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT);
+                            } while (globalIdServiceBean.alreadyRegistered(theDataset) && attempts <= FOOLPROOF_RETRIAL_ATTEMPTS_LIMIT);
                         }
                         if(!retry) {
                             logger.warning("Reserving PID for: "  + getDataset().getId() + " during publication failed.");
@@ -212,4 +213,21 @@ public void setDataset(Dataset dataset) {
     protected Timestamp getTimestamp() {
         return timestamp;
     }
+
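+    /**
+     * Checks whether any metadata block changed between the two versions is designated
+     * as a "system" block via the JvmSettings.MDB_SYSTEM_KEY_FOR setting. If so, the
+     * request must supply a key (via getRequest().getSystemMetadataBlockKeyFor()) that
+     * matches, case-insensitively; otherwise an IllegalCommandException is thrown.
+     * Pass null as persistedVersion when the dataset is being created.
+     */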
+    protected void checkSystemMetadataKeyIfNeeded(DatasetVersion newVersion, DatasetVersion persistedVersion) throws IllegalCommandException {
+        Set<MetadataBlock> changedMDBs = DatasetVersionDifference.getBlocksWithChanges(newVersion, persistedVersion);
+        for (MetadataBlock mdb : changedMDBs) {
+            logger.fine(mdb.getName() + " has been changed");
+            String smdbString = JvmSettings.MDB_SYSTEM_KEY_FOR.lookupOptional(mdb.getName())
+                    .orElse(null);
+            if (smdbString != null) {
+                logger.fine("Found key: " + smdbString);
+                String mdKey = getRequest().getSystemMetadataBlockKeyFor(mdb.getName());
+                logger.fine("Found supplied key: " + mdKey);
+                if (mdKey == null || !mdKey.equalsIgnoreCase(smdbString)) {
+                    throw new IllegalCommandException("Updating system metadata in block " + mdb.getName() + " requires a valid key", this);
+                }
+            }
+        }
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java
index 8ba1d181609..59c5d970b09 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AddRoleAssigneesToExplicitGroupCommand.java
@@ -16,7 +16,7 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
index 4a5998aea00..94aff3e3f5d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ChangeUserIdentifierCommand.java
@@ -18,11 +18,11 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import java.util.List;
 import java.util.Set;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
-import javax.validation.ValidatorFactory;
-import javax.ws.rs.core.Response;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
+import jakarta.validation.ValidatorFactory;
+import jakarta.ws.rs.core.Response;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
index 72439d4ba4a..bcaece55fed 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommand.java
@@ -27,15 +27,26 @@ public class CreateDatasetVersionCommand extends AbstractDatasetCommand<DatasetV
     
     final DatasetVersion newVersion;
     final Dataset dataset;
+    final boolean validate;
     
     public CreateDatasetVersionCommand(DataverseRequest aRequest, Dataset theDataset, DatasetVersion aVersion) {
+        this(aRequest, theDataset, aVersion, true);
+    }
+
+    public CreateDatasetVersionCommand(DataverseRequest aRequest, Dataset theDataset, DatasetVersion aVersion, boolean validate) {
         super(aRequest, theDataset);
         dataset = theDataset;
         newVersion = aVersion;
+        this.validate = validate;
     }
     
     @Override
     public DatasetVersion execute(CommandContext ctxt) throws CommandException {
+        /*
+         * CreateDatasetVersionCommand assumes that the new version has not yet been
+         * added to the dataset you pass in. Use UpdateDatasetVersionCommand if you
+         * created the new version via Dataset.getOrCreateEditVersion() and just want
+         * to persist it.
+         */
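+        /*
+         * A minimal usage sketch (hypothetical variable names; "engine" stands for the
+         * installation's command engine), assuming the new version has not been
+         * attached to the dataset yet:
+         *
+         *   DatasetVersion newVersion = new DatasetVersion();
+         *   newVersion.setDatasetFields(someFields);
+         *   engine.submit(new CreateDatasetVersionCommand(request, dataset, newVersion));
+         */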
         DatasetVersion latest = dataset.getLatestVersion();
         if ( latest.isWorkingCopy() ) {
             // A dataset can only have a single draft, which has to be the latest.
@@ -44,6 +55,10 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
                 throw new IllegalCommandException("Latest version is already a draft. Cannot add another draft", this);
             }
         }
+        
+        //Will throw an IllegalCommandException if a system metadata block is changed and the appropriate key is not supplied.
+        checkSystemMetadataKeyIfNeeded(newVersion, latest);
+
                 
         List<FileMetadata> newVersionMetadatum = new ArrayList<>(latest.getFileMetadatas().size());
         for ( FileMetadata fmd : latest.getFileMetadatas() ) {
@@ -59,10 +74,11 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
         //good wrapped response if the TOA/Request Access not in compliance
         prepareDatasetAndVersion();
         
-        // TODO make async
-        // ctxt.index().indexDataset(dataset);
-        return ctxt.datasets().storeVersion(newVersion);
-        
+        DatasetVersion version = ctxt.datasets().storeVersion(newVersion);
+        if (ctxt.index() != null) {
+            ctxt.index().asyncIndexDataset(dataset, true);
+        }
+        return version;
     }
     
     /**
@@ -81,7 +97,9 @@ public void prepareDatasetAndVersion() throws CommandException {
         //originally missing/empty required fields were not
         //throwing constraint violations because they
         //had been stripped from the dataset fields prior to validation 
-        validateOrDie(newVersion, false);
+        if (this.validate) {
+            validateOrDie(newVersion, false);
+        }
         DatasetFieldUtil.tidyUpFields(newVersion.getDatasetFields(), true);
         
         final List<DatasetVersion> currentVersions = dataset.getVersions();
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
index cece4230eed..3efefe90681 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java
@@ -97,6 +97,10 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
         if (ctxt.dataverses().findByAlias(created.getAlias()) != null) {
             throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this);
         }
+        
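+        // A collection-level filePIDsEnabled value may only be set when the installation-wide
+        // AllowEnablingFilePIDsPerCollection setting is true: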
+        if(created.getFilePIDsEnabled()!=null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) {
+            throw new IllegalCommandException("File PIDs cannot be enabled per collection", this);
+        }
 
         // Save the dataverse
         Dataverse managedDv = ctxt.dataverses().save(created);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java
index 01bcdca1238..78fe519e53d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateHarvestedDatasetCommand.java
@@ -16,7 +16,7 @@
 public class CreateHarvestedDatasetCommand extends AbstractCreateDatasetCommand {
 
     public CreateHarvestedDatasetCommand(Dataset theDataset, DataverseRequest aRequest) {
-        super(theDataset, aRequest);
+        super(theDataset, aRequest, true);
     }
     
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
new file mode 100644
index 00000000000..3a21345448b
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDataFilesCommand.java
@@ -0,0 +1,751 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
+import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker;
+import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+//import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException;
+import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper;
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
+import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.FileUtil;
+import static edu.harvard.iq.dataverse.util.FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
+import static edu.harvard.iq.dataverse.util.FileUtil.createIngestFailureReport;
+import static edu.harvard.iq.dataverse.util.FileUtil.determineFileType;
+import static edu.harvard.iq.dataverse.util.FileUtil.determineFileTypeByNameAndExtension;
+import static edu.harvard.iq.dataverse.util.FileUtil.getFilesTempDirectory;
+import static edu.harvard.iq.dataverse.util.FileUtil.saveInputStreamInTempFile;
+import static edu.harvard.iq.dataverse.util.FileUtil.useRecognizedType;
+import edu.harvard.iq.dataverse.util.ShapefileHandler;
+import edu.harvard.iq.dataverse.util.StringUtil;
+import edu.harvard.iq.dataverse.util.file.BagItFileHandler;
+import edu.harvard.iq.dataverse.util.file.BagItFileHandlerFactory;
+import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.logging.Logger;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+import jakarta.enterprise.inject.spi.CDI;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ *
+ * @author landreev
+ */
+// Note the commented out @RequiredPermissions. We need to use dynamic 
+// permissions instead, to accommodate both adding files to an existing 
+// dataset and files being uploaded in the context of creating a new dataset
+// via the Add Dataset page. 
+//@RequiredPermissions( Permission.EditDataset )
+public class CreateNewDataFilesCommand extends AbstractCommand<CreateDataFileResult> {
+    private static final Logger logger = Logger.getLogger(CreateNewDataFilesCommand.class.getCanonicalName());
+    
+    private final DatasetVersion version;
+    private final InputStream inputStream;
+    private final String fileName;
+    private final String suppliedContentType; 
+    private final UploadSessionQuotaLimit quota;
+    // parent Dataverse must be specified when the command is called on Create 
+    // of a new dataset that does not exist in the database yet (for the purposes
+    // of authorization - see getRequiredPermissions() below):
+    private final Dataverse parentDataverse;
+    // With Direct Upload the following values already exist and are passed to the command:
+    private final String newStorageIdentifier; 
+    private final String newCheckSum; 
+    private DataFile.ChecksumType newCheckSumType;
+    private final Long newFileSize;
+
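+    // The two constructors below are convenience wrappers that delegate to the full
+    // constructor, defaulting the checksum type, file size and/or parent Dataverse to null: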
+    public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum) {
+        this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, null);
+    }
+    
+    public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType) {
+        this(aRequest, version, inputStream, fileName, suppliedContentType, newStorageIdentifier, quota, newCheckSum, newCheckSumType, null, null);
+    }
+    
+    // This version of the command must be used when files are created in the 
+    // context of creating a brand new dataset (from the Add Dataset page):
+    
+    public CreateNewDataFilesCommand(DataverseRequest aRequest, DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, UploadSessionQuotaLimit quota, String newCheckSum, DataFile.ChecksumType newCheckSumType, Long newFileSize, Dataverse dataverse) {
+        super(aRequest, dataverse);
+        
+        this.version = version;
+        this.inputStream = inputStream;
+        this.fileName = fileName;
+        this.suppliedContentType = suppliedContentType; 
+        this.newStorageIdentifier = newStorageIdentifier; 
+        this.newCheckSum = newCheckSum; 
+        this.newCheckSumType = newCheckSumType;
+        this.parentDataverse = dataverse;
+        this.quota = quota;
+        this.newFileSize = newFileSize;
+    }
+    
+
+    @Override
+    public CreateDataFileResult execute(CommandContext ctxt) throws CommandException {
+        List<DataFile> datafiles = new ArrayList<>();
+
+        //When no checksum/checksum type is sent (normal upload; the checksum needs to be calculated), set the type to the current default
+        if(newCheckSumType == null) {
+            newCheckSumType = ctxt.systemConfig().getFileFixityChecksumAlgorithm();
+        }
+
+        String warningMessage = null;
+
+        // save the file, in the temporary location for now: 
+        Path tempFile = null;
+
+        Long fileSizeLimit = ctxt.systemConfig().getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId());
+        Long storageQuotaLimit = null; 
+        
+        if (ctxt.systemConfig().isStorageQuotasEnforced()) {
+            if (quota != null) {
+                storageQuotaLimit = quota.getRemainingQuotaInBytes();
+            }
+        }
+        String finalType = null;
+        File newFile = null;    // this File will be used for a single-file, local (non-direct) upload
+        long fileSize = -1; 
+
+
+        if (newStorageIdentifier == null) {
+            if (getFilesTempDirectory() != null) {
+                try {
+                    tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload");
+                    // "temporary" location is the key here; this is why we are not using
+                    // the DataStore framework for this - the assumption is that
+                    // temp files will always be stored on the local filesystem.
+                    // -- L.A. Jul. 2014
+                    logger.fine("Will attempt to save the file as: " + tempFile.toString());
+                    Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING);
+                } catch (IOException ioex) {
+                    throw new CommandExecutionException("Failed to save the upload as a temp file (temp disk space?)", ioex, this);
+                }
+
+                // A file size check, before we do anything else:
+                // (note that "no size limit set" = "unlimited")
+                // (also note, that if this is a zip file, we'll be checking
+                // the size limit for each of the individual unpacked files)
+                fileSize = tempFile.toFile().length();
+                if (fileSizeLimit != null && fileSize > fileSizeLimit) {
+                    try {
+                        tempFile.toFile().delete();
+                    } catch (Exception ex) {
+                        // ignore - but log a warning
+                        logger.warning("Could not remove temp file " + tempFile.getFileName());
+                    }
+                    throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)), this);
+                }
+
+            } else {
+                throw new CommandExecutionException("Temp directory is not configured.", this);
+            }
+            
+            logger.fine("mime type supplied: " + suppliedContentType);
+            
+            // Let's try our own utilities (Jhove, etc.) to determine the file type
+            // of the uploaded file. (We may already have a mime type supplied for this
+            // file - maybe the type that the browser recognized on upload; or, if
+            // it's a harvest, maybe the remote server has already given us the type
+            // for this file... with our own type utility we may or may not do better
+            // than the type supplied.)
+            // -- L.A.
+            String recognizedType = null;
+
+            try {
+                recognizedType = determineFileType(tempFile.toFile(), fileName);
+                logger.fine("File utility recognized the file as " + recognizedType);
+                if (recognizedType != null && !recognizedType.equals("")) {
+                    if (useRecognizedType(suppliedContentType, recognizedType)) {
+                        finalType = recognizedType;
+                    }
+                }
+
+            } catch (Exception ex) {
+                logger.warning("Failed to run the file utility mime type check on file " + fileName);
+            }
+
+            if (finalType == null) {
+                finalType = (suppliedContentType == null || suppliedContentType.equals(""))
+                        ? MIME_TYPE_UNDETERMINED_DEFAULT
+                        : suppliedContentType;
+            }
+
+            // A few special cases:
+            // if this is a gzipped FITS file, we'll uncompress it, and ingest it as
+            // a regular FITS file:
+            if (finalType.equals("application/fits-gzipped")) {
+
+                InputStream uncompressedIn = null;
+                String finalFileName = fileName;
+                // if the file name had the ".gz" extension, remove it,
+                // since we are going to uncompress it:
+                if (fileName != null && fileName.matches(".*\\.gz$")) {
+                    finalFileName = fileName.replaceAll("\\.gz$", "");
+                }
+
+                DataFile datafile = null;
+                long uncompressedFileSize = -1; 
+                try {
+                    uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile()));
+                    File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit, storageQuotaLimit);
+                    uncompressedFileSize = unZippedTempFile.length();
+                    datafile = FileUtil.createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, ctxt.systemConfig().getFileFixityChecksumAlgorithm());
+                } catch (IOException | FileExceedsMaxSizeException | FileExceedsStorageQuotaException ioex) {
+                    // It looks like we simply skip the file silently if its uncompressed size
+                    // exceeds the limit; we should probably report this in detail instead.
+                    datafile = null;
+                } finally {
+                    if (uncompressedIn != null) {
+                        try {
+                            uncompressedIn.close();
+                        } catch (IOException e) {
+                        }
+                    }
+                }
+
+                // If we were able to produce an uncompressed file, we'll use it
+                // to create and return a final DataFile; if not, we're not going
+                // to do anything - and then a new DataFile will be created further
+                // down, from the original, uncompressed file.
+                if (datafile != null) {
+                    // remove the compressed temp file:
+                    try {
+                        tempFile.toFile().delete();
+                    } catch (SecurityException ex) {
+                        // (this is very non-fatal)
+                        logger.warning("Failed to delete temporary file " + tempFile.toString());
+                    }
+
+                    datafiles.add(datafile);
+                    // Update quota if present
+                    if (quota != null) {
+                        quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + uncompressedFileSize);
+                    }
+                    return CreateDataFileResult.success(fileName, finalType, datafiles);
+                }
+
+                // If it's a ZIP file, we are going to unpack it and create multiple
+                // DataFile objects from its contents:
+            } else if (finalType.equals("application/zip")) {
+
+                ZipFile zipFile = null;
+                ZipInputStream unZippedIn = null;
+                ZipEntry zipEntry = null;
+
+                int fileNumberLimit = ctxt.systemConfig().getZipUploadFilesLimit();
+                Long combinedUnzippedFileSize = 0L;
+
+                try {
+                    Charset charset = null;
+                    /*
+                	TODO: (?)
+                	We may want to investigate somehow letting the user specify 
+                	the charset for the filenames in the zip file...
+                    - otherwise, ZipInputStream bails out if it encounters a file 
+                	name that's not valid in the current charset (i.e., UTF-8, in 
+                    our case). It would be a bit trickier than what we're doing for 
+                    SPSS tabular ingests - with the lang. encoding pulldown menu - 
+                	because this encoding needs to be specified *before* we upload and
+                    attempt to unzip the file. 
+                	        -- L.A. 4.0 beta12
+                	logger.info("default charset is "+Charset.defaultCharset().name());
+                	if (Charset.isSupported("US-ASCII")) {
+                    	logger.info("charset US-ASCII is supported.");
+                    	charset = Charset.forName("US-ASCII");
+                    	if (charset != null) {
+                       	    logger.info("was able to obtain charset for US-ASCII");
+                    	}
+                    
+                	 }
+                     */
+
+                    /** 
+                     * Perform a quick check for how many individual files are 
+                     * inside this zip archive. If it's above the limit, we can 
+                     * give up right away, without doing any unpacking. 
+                     * This should be a fairly inexpensive operation, we just need
+                     * to read the directory at the end of the file. 
+                     */
+                    
+                    if (charset != null) {
+                        zipFile = new ZipFile(tempFile.toFile(), charset);
+                    } else {
+                        zipFile = new ZipFile(tempFile.toFile());
+                    }
+                    /**
+                     * The ZipFile constructors above will throw ZipException - 
+                     * a type of IOException - if there's something wrong 
+                     * with this file as a zip. There's no need to intercept it
+                     * here, it will be caught further below, with other IOExceptions,
+                     * at which point we'll give up on trying to unpack it and
+                     * then attempt to save it as is.
+                     */
+
+                    int numberOfUnpackableFiles = 0; 
+                     
+                    /**
+                     * Note that we can't just use zipFile.size(),
+                     * unfortunately, since that's the total number of entries,
+                     * some of which can be directories. So we need to go
+                     * through all the individual zipEntries and count the ones
+                     * that are files.
+                     */
+
+                    for (Enumeration<? extends ZipEntry> entries = zipFile.entries(); entries.hasMoreElements();) {
+                        ZipEntry entry = entries.nextElement();
+                        logger.fine("inside first zip pass; this entry: "+entry.getName());
+                        if (!entry.isDirectory()) {
+                            String shortName = entry.getName().replaceFirst("^.*[\\/]", "");
+                            // ... and, finally, check if it's a "fake" file - a zip archive entry
+                            // created for a MacOS X filesystem element: (these
+                            // start with "._") 
+                            if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) {
+                                numberOfUnpackableFiles++;
+                                if (numberOfUnpackableFiles > fileNumberLimit) {
+                                    logger.warning("Zip upload - too many files in the zip to process individually.");
+                                    warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit
+                                            + "); please upload a zip archive with fewer files, if you want them to be ingested "
+                                            + "as individual DataFiles.";
+                                    throw new IOException();
+                                }
+                                // In addition to counting the files, we can
+                                // also check the file size while we're here, 
+                                // provided the size limit is defined; if a single 
+                                // file is above the individual size limit, unzipped,
+                                // we give up on unpacking this zip archive as well: 
+                                if (fileSizeLimit != null && entry.getSize() > fileSizeLimit) {
+                                    throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(entry.getSize()), bytesToHumanReadable(fileSizeLimit)));
+                                }
+                                // Similarly, we want to check if saving all these unpacked 
+                                // files is going to push the disk usage over the 
+                                // quota:
+                                if (storageQuotaLimit != null) {
+                                    combinedUnzippedFileSize = combinedUnzippedFileSize + entry.getSize();
+                                    if (combinedUnzippedFileSize > storageQuotaLimit) {
+                                        //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(combinedUnzippedFileSize), bytesToHumanReadable(storageQuotaLimit)));
+                                        // change of plans: if the unzipped content inside exceeds the remaining quota, 
+                                        // we reject the upload outright, rather than accepting the zip 
+                                        // file as is. 
+                                        throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.unzipped.quota_exceeded"), bytesToHumanReadable(storageQuotaLimit)), this);
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    
+                    // OK we're still here - that means we can proceed unzipping. 
+                    
+                    // Close the ZipFile, re-open as ZipInputStream: 
+                    zipFile.close(); 
+                    // reset:
+                    combinedUnzippedFileSize = 0L;
+
+                    if (charset != null) {
+                        unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset);
+                    } else {
+                        unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()));
+                    }
+
+                    while (true) {
+                        try {
+                            zipEntry = unZippedIn.getNextEntry();
+                        } catch (IllegalArgumentException iaex) {
+                            // Note:
+                            // ZipInputStream documentation doesn't even mention that
+                            // getNextEntry() throws an IllegalArgumentException!
+                            // but that's what happens if the file name of the next
+                            // entry is not valid in the current CharSet.
+                            // -- L.A.
+                            warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) Saving the file as is.";
+                            logger.warning(warningMessage);
+                            throw new IOException();
+                        }
+
+                        if (zipEntry == null) {
+                            break;
+                        }
+                        // Note that some zip entries may be directories - we
+                        // simply skip them:
+
+                        if (!zipEntry.isDirectory()) {
+                            if (datafiles.size() > fileNumberLimit) {
+                                logger.warning("Zip upload - too many files.");
+                                warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit
+                                        + "); please upload a zip archive with fewer files, if you want them to be ingested "
+                                        + "as individual DataFiles.";
+                                throw new IOException();
+                            }
+
+                            String fileEntryName = zipEntry.getName();
+                            logger.fine("ZipEntry, file: " + fileEntryName);
+
+                            if (fileEntryName != null && !fileEntryName.equals("")) {
+
+                                String shortName = fileEntryName.replaceFirst("^.*[\\/]", "");
+
+                                // Check if it's a "fake" file - a zip archive entry
+                                // created for a MacOS X filesystem element: (these
+                                // start with "._")
+                                if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) {
+                                    // OK, this seems like an OK file entry - we'll try
+                                    // to read it and create a DataFile with it:
+
+                                    String storageIdentifier = FileUtil.generateStorageIdentifier();
+                                    File unzippedFile = new File(getFilesTempDirectory() + "/" + storageIdentifier);
+                                    Files.copy(unZippedIn, unzippedFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
+                                    // No need to check the size of this unpacked file against the size limit, 
+                                    // since we've already checked for that in the first pass.
+                                    
+                                    DataFile datafile = FileUtil.createSingleDataFile(version, null, storageIdentifier, shortName,
+                                            MIME_TYPE_UNDETERMINED_DEFAULT,
+                                            ctxt.systemConfig().getFileFixityChecksumAlgorithm(), null, false);
+                                    
+                                    if (!fileEntryName.equals(shortName)) {
+                                        // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes),
+                                        // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all 
+                                        // the leading, trailing and duplicate slashes; then replace all the characters that 
+                                        // don't pass our validation rules.
+                                        String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", "");
+                                        directoryName = StringUtil.sanitizeFileDirectory(directoryName, true);
+                                        // if (!"".equals(directoryName)) {
+                                        if (!StringUtil.isEmpty(directoryName)) {
+                                            logger.fine("setting the directory label to " + directoryName);
+                                            datafile.getFileMetadata().setDirectoryLabel(directoryName);
+                                        }
+                                    }
+
+                                    if (datafile != null) {
+                                        // We have created this datafile with the mime type "unknown";
+                                        // Now that we have it saved in a temporary location,
+                                        // let's try and determine its real type:
+
+                                        String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier();
+
+                                        try {
+                                            recognizedType = determineFileType(unzippedFile, shortName);
+                                            // null the File explicitly, to release any open FDs:
+                                            unzippedFile = null;
+                                            logger.fine("File utility recognized unzipped file as " + recognizedType);
+                                            if (recognizedType != null && !recognizedType.equals("")) {
+                                                datafile.setContentType(recognizedType);
+                                            }
+                                        } catch (Exception ex) {
+                                            logger.warning("Failed to run the file utility mime type check on file " + fileName);
+                                        }
+
+                                        datafiles.add(datafile);
+                                        combinedUnzippedFileSize += datafile.getFilesize();
+                                    }
+                                }
+                            }
+                        }
+                        unZippedIn.closeEntry();
+
+                    }
+
+                } catch (IOException ioex) {
+                    // just clear the datafiles list and let
+                    // ingest default to creating a single DataFile out
+                    // of the original uploaded file, as is.
+                    logger.warning("Unzipping failed; rolling back to saving the file as is.");
+                    if (warningMessage == null) {
+                        warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed");
+                    }
+
+                    datafiles.clear();
+                } catch (FileExceedsMaxSizeException femsx) {
+                    logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage());
+                    warningMessage =  BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit)));
+                    datafiles.clear();
+                } /*catch (FileExceedsStorageQuotaException fesqx) {
+                    //logger.warning("One of the unzipped files exceeds the storage quota limit; resorting to saving the file as is. " + fesqx.getMessage());
+                    //warningMessage =  BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.quota", Arrays.asList(FileSizeChecker.bytesToHumanReadable(storageQuotaLimit)));
+                    //datafiles.clear();
+                    throw new CommandExecutionException(fesqx.getMessage(), fesqx, this);
+                }*/ finally {
+                    if (zipFile != null) {
+                        try {
+                            zipFile.close();
+                        } catch (Exception zEx) {}
+                    }
+                    if (unZippedIn != null) {
+                        try {
+                            unZippedIn.close();
+                        } catch (Exception zEx) {}
+                    }
+                }
+                if (!datafiles.isEmpty()) {
+                    // remove the uploaded zip file:
+                    try {
+                        Files.delete(tempFile);
+                    } catch (IOException ioex) {
+                        // do nothing - it's just a temp file.
+                        logger.warning("Could not remove temp file " + tempFile.getFileName().toString());
+                    }
+                    // update the quota object: 
+                    if (quota != null) {
+                        quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedUnzippedFileSize);
+                    }
+                    // and return:
+                    return CreateDataFileResult.success(fileName, finalType, datafiles);
+                }
+
+            } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) {
+                // Shape files may have to be split into multiple files,
+                // one zip archive per each complete set of shape files:
+
+                // File rezipFolder = new File(this.getFilesTempDirectory());
+                File rezipFolder = FileUtil.getShapefileUnzipTempDirectory();
+
+                IngestServiceShapefileHelper shpIngestHelper;
+                shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder);
+
+                boolean didProcessWork = shpIngestHelper.processFile();
+                if (!(didProcessWork)) {
+                    logger.severe("Processing of zipped shapefile failed.");
+                    return CreateDataFileResult.error(fileName, finalType);
+                }
+                long combinedRezippedFileSize = 0L;
+
+                try {
+                    
+                    for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) {
+                        FileInputStream finalFileInputStream = new FileInputStream(finalFile);
+                        finalType = FileUtil.determineContentType(finalFile);
+                        if (finalType == null) {
+                            logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'");
+                            continue;
+                        }
+
+                        File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit, storageQuotaLimit != null ? storageQuotaLimit - combinedRezippedFileSize : null);
+                        DataFile new_datafile = FileUtil.createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, ctxt.systemConfig().getFileFixityChecksumAlgorithm());
+                        
+                        String directoryName = null;
+                        String absolutePathName = finalFile.getParent();
+                        if (absolutePathName != null) {
+                            if (absolutePathName.length() > rezipFolder.toString().length()) {
+                                // This file lives in a subfolder - we want to 
+                                // preserve it in the FileMetadata:
+                                directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1);
+
+                                if (!StringUtil.isEmpty(directoryName)) {
+                                    new_datafile.getFileMetadata().setDirectoryLabel(directoryName);
+                                }
+                            }
+                        }
+                        if (new_datafile != null) {
+                            datafiles.add(new_datafile);
+                            combinedRezippedFileSize += unZippedShapeTempFile.length();
+                            // todo: can this new_datafile be null?
+                        } else {
+                            logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName());
+                        }
+                        try {
+                            finalFileInputStream.close();
+                        } catch (IOException ioex) {
+                            // this one can be ignored
+                        }
+                    }
+                } catch (FileExceedsMaxSizeException | FileExceedsStorageQuotaException femsx) {
+                    logger.severe("One of the unzipped shape files exceeded the size limit, or the storage quota; giving up. " + femsx.getMessage());
+                    datafiles.clear();
+                    // (or should we throw an exception, instead of skipping it quietly?)
+                } catch (IOException ioex) {
+                    throw new CommandExecutionException("Failed to process one of the components of the unpacked shape file", ioex, this);
+                    // todo? - maybe try to provide a more detailed explanation, of which repackaged component, etc.?
+                }
+
+                // Delete the temp directory used for unzipping
+                // The try-catch is due to errors encountered when using NFS for storing files,
+                // cf. https://github.com/IQSS/dataverse/issues/5909
+                try {
+                    FileUtils.deleteDirectory(rezipFolder);
+                } catch (IOException ioex) {
+                    // do nothing - it's a temp folder.
+                    logger.warning("Could not remove temp folder, error message : " + ioex.getMessage());
+                }
+
+                if (!datafiles.isEmpty()) {
+                    // remove the uploaded zip file:
+                    try {
+                        Files.delete(tempFile);
+                    } catch (IOException ioex) {
+                        // ignore - it's just a temp file - but let's log a warning
+                        logger.warning("Could not remove temp file " + tempFile.getFileName().toString());
+                    } catch (SecurityException se) {
+                        // same
+                        logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: "
+                                + se.getMessage());
+                    }
+                    // update the quota object: 
+                    if (quota != null) {
+                        quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + combinedRezippedFileSize);
+                    }
+                    return CreateDataFileResult.success(fileName, finalType, datafiles);
+                } else {
+                    logger.severe("No files added from directory of rezipped shapefiles");
+                }
+                return CreateDataFileResult.error(fileName, finalType);
+
+            } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) {
+                
+                try { 
+                    Optional<BagItFileHandler> bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler();
+                    if (bagItFileHandler.isPresent()) {
+                        CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(ctxt.systemConfig(), version, fileName, tempFile.toFile());
+                        return result;
+                    }
+                } catch (IOException ioex) {
+                    throw new CommandExecutionException("Failed to process uploaded BagIt file", ioex, this);
+                }
+            }
+            
+            // These are the final File and its size that will be used to 
+            // create a single DataFile: 
+            
+            newFile = tempFile.toFile();
+            fileSize = newFile.length();
+            
+        } else {
+            // Direct upload.
+            
+            // Since this is a direct upload, there is no temp file associated 
+            // with it, so we may or may not know the size of the file. If this is 
+            // a direct upload via the UI, the page must have already looked up 
+            // the size, after the client confirmed that the upload had completed. 
+            // (so that we can reject the upload here, i.e. before the user clicks
+            // save, if it's over the size limit or storage quota). However, if 
+            // this is a direct upload via the API, we will wait until the 
+            // upload is finalized in the saveAndAddFiles method to enforce the 
+            // limits. 
+            if (newFileSize != null) {
+                fileSize = newFileSize;
+                
+                // if the size is specified, and it's above the individual size 
+                // limit for this store, we can reject it now:
+                if (fileSizeLimit != null && fileSize > fileSizeLimit) {
+                    throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)), this);
+                }
+            }
+            
+            // Default to suppliedContentType if set, or to the overall undetermined default if a content type isn't supplied
+            finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType;
+            String type = determineFileTypeByNameAndExtension(fileName);
+            if (!StringUtils.isBlank(type)) {
+                //Use rules for deciding when to trust browser supplied type
+                if (useRecognizedType(finalType, type)) {
+                    finalType = type;
+                }
+                logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType);
+            }
+            
+            
+        }
+        
+        // Finally, if none of the special cases above were applicable (or 
+        // if we were unable to unpack an uploaded file, etc.), we'll just 
+        // create and return a single DataFile:
+        
+        
+        // We have already checked that this file does not exceed the individual size limit; 
+        // but if we are processing it as is, as a single file, we need to check if 
+        // its size does not go beyond the allocated storage quota (if specified):
+        
+        if (storageQuotaLimit != null && fileSize > storageQuotaLimit) {
+            if (newFile != null) {
+                // Remove the temp. file, if this is a non-direct upload. 
+                // If this is a direct upload, it is the responsibility of the 
+                // component calling the command to remove the file that may have
+                // already been saved in the S3 volume. 
+                try {
+                    newFile.delete();
+                } catch (Exception ex) {
+                    // ignore - but log a warning
+                    logger.warning("Could not remove temp file " + tempFile.getFileName());
+                }
+            }
+            throw new CommandExecutionException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)), this);
+        } 
+        
+        DataFile datafile = FileUtil.createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum);
+
+        if (datafile != null) {
+
+            if (warningMessage != null) {
+                createIngestFailureReport(datafile, warningMessage);
+                datafile.SetIngestProblem();
+            }
+            if (datafile.getFilesize() < 0) {
+                datafile.setFilesize(fileSize);
+            }
+            datafiles.add(datafile);
+
+            // Update the quota definition for the *current upload session*
+            // This is relevant for the uploads going through the UI page 
+            // (where there may be an appreciable amount of time between the user
+            // uploading the files and clicking "save"). The file size should be 
+            // available here for both direct and local uploads via the UI. 
+            // It is not yet available if this is direct-via-API - but 
+            // for API uploads the quota check will be enforced during the final 
+            // save. 
+            if (fileSize > 0 && quota != null) {
+                logger.info("Setting total usage in bytes to " + (quota.getTotalUsageInBytes() + fileSize));
+                quota.setTotalUsageInBytes(quota.getTotalUsageInBytes() + fileSize);
+            }
+
+            return CreateDataFileResult.success(fileName, finalType, datafiles);
+        }
+
+        return CreateDataFileResult.error(fileName, finalType);
+    }   // end of execute()
+    
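+    /**
+     * Dynamic permissions: when a parent Dataverse was supplied, the files are being
+     * uploaded as part of creating a new dataset, so AddDataset is required on the
+     * collection; otherwise EditDataset is required on the existing dataset.
+     */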
+    @Override
+    public Map<String, Set<Permission>> getRequiredPermissions() {
+        Map<String, Set<Permission>> ret = new HashMap<>();
+
+        ret.put("", new HashSet<>());
+        
+        if (parentDataverse != null) {
+            // The command is called in the context of uploading files on 
+            // create of a new dataset
+            ret.get("").add(Permission.AddDataset);
+        } else {
+            // An existing dataset
+            ret.get("").add(Permission.EditDataset);
+        }
+
+        return ret;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java
index 1efaf14c755..c9ebe735e31 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateNewDatasetCommand.java
@@ -49,15 +49,17 @@ public class CreateNewDatasetCommand extends AbstractCreateDatasetCommand {
     private final Dataverse dv;
 
     public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest) {
-        this( theDataset, aRequest, false); 
+        this( theDataset, aRequest, null);
     }
     
-    public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean registrationRequired) {
-        this( theDataset, aRequest, registrationRequired, null);
+    public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, Template template) {
+        super(theDataset, aRequest);
+        this.template = template;
+        dv = theDataset.getOwner();
     }
     
-    public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, boolean registrationRequired, Template template) {
-        super(theDataset, aRequest, registrationRequired);
+    public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, Template template, boolean validate) {
+        super(theDataset, aRequest, false, validate);
         this.template = template;
         dv = theDataset.getOwner();
     }
@@ -71,7 +73,7 @@ public CreateNewDatasetCommand(Dataset theDataset, DataverseRequest aRequest, bo
     protected void additionalParameterTests(CommandContext ctxt) throws CommandException {
         if ( nonEmpty(getDataset().getIdentifier()) ) {
             GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(getDataset().getProtocol(), ctxt);
-            if ( !ctxt.datasets().isIdentifierUnique(getDataset().getIdentifier(), getDataset(), idServiceBean) ) {
+            if ( !idServiceBean.isGlobalIdUnique(getDataset().getGlobalId()) ) {
                 throw new IllegalCommandException(String.format("Dataset with identifier '%s', protocol '%s' and authority '%s' already exists",
                                                                  getDataset().getIdentifier(), getDataset().getProtocol(), getDataset().getAuthority()), 
                     this);
@@ -87,6 +89,9 @@ protected DatasetVersion getVersionToPersist( Dataset theDataset ) {
     @Override
     protected void handlePid(Dataset theDataset, CommandContext ctxt) throws CommandException {
         GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
+        if(!idServiceBean.isConfigured()) {
+            throw new IllegalCommandException("PID Provider " + idServiceBean.getProviderInformation().get(0) + " is not configured.", this);
+        }
         if ( !idServiceBean.registerWhenPublished() ) {
             // pre-register a persistent id
             registerExternalIdentifier(theDataset, ctxt, true);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
index cb9b0a3c774..8cffcd3d821 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.api.AbstractApiBean;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -13,7 +12,7 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.persistence.NoResultException;
+import jakarta.persistence.NoResultException;
 
 /**
  * Create a new role in a dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java
index 147e1870566..7a549a51dd5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateSavedSearchCommand.java
@@ -12,7 +12,7 @@
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 @RequiredPermissions(Permission.PublishDataverse)
 public class CreateSavedSearchCommand extends AbstractCommand<SavedSearch> {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
index ca5bf1d3f2c..f83041d87bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CuratePublishedDatasetVersionCommand.java
@@ -7,8 +7,8 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.DatasetFieldUtil;
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
@@ -55,10 +55,11 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // Invariant: Dataset has no locks preventing the update
         DatasetVersion updateVersion = getDataset().getLatestVersionForCopy();
 
+        DatasetVersion newVersion = getDataset().getOrCreateEditVersion();
         // Copy metadata from draft version to latest published version
-        updateVersion.setDatasetFields(getDataset().getOrCreateEditVersion().initDatasetFields());
+        updateVersion.setDatasetFields(newVersion.initDatasetFields());
 
-        validateOrDie(updateVersion, isValidateLenient());
+        
 
         // final DatasetVersion editVersion = getDataset().getEditVersion();
         DatasetFieldUtil.tidyUpFields(updateVersion.getDatasetFields(), true);
@@ -66,16 +67,19 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // Merge the new version into our JPA context
         ctxt.em().merge(updateVersion);
 
-
         TermsOfUseAndAccess oldTerms = updateVersion.getTermsOfUseAndAccess();
-        TermsOfUseAndAccess newTerms = getDataset().getOrCreateEditVersion().getTermsOfUseAndAccess();
+        TermsOfUseAndAccess newTerms = newVersion.getTermsOfUseAndAccess();
         newTerms.setDatasetVersion(updateVersion);
         updateVersion.setTermsOfUseAndAccess(newTerms);
         //Put old terms on version that will be deleted....
-        getDataset().getOrCreateEditVersion().setTermsOfUseAndAccess(oldTerms);
+        newVersion.setTermsOfUseAndAccess(oldTerms);
+        
+        // Validate metadata and Terms of Use and Access (TofA) conditions
+        validateOrDie(updateVersion, isValidateLenient());
+        
         //Also set the fileaccessrequest boolean on the dataset to match the new terms
         getDataset().setFileAccessRequest(updateVersion.getTermsOfUseAndAccess().isFileAccessRequest());
-        List<WorkflowComment> newComments = getDataset().getOrCreateEditVersion().getWorkflowComments();
+        List<WorkflowComment> newComments = newVersion.getWorkflowComments();
         if (newComments!=null && newComments.size() >0) {
             for(WorkflowComment wfc: newComments) {
                 wfc.setDatasetVersion(updateVersion);
@@ -87,7 +91,9 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // we have to merge to update the database but not flush because
         // we don't want to create two draft versions!
         Dataset tempDataset = ctxt.em().merge(getDataset());
-
+        
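+        // re-obtain the latest published version from the merged (managed) dataset instance before updating its file metadata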
+        updateVersion = tempDataset.getLatestVersionForCopy();
+        
         // Look for file metadata changes and update published metadata if needed
         List<FileMetadata> pubFmds = updateVersion.getFileMetadatas();
         int pubFileCount = pubFmds.size();
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java
index f23033f09fa..594d4fe25ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommand.java
@@ -34,10 +34,10 @@
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 import javax.net.ssl.SSLContext;
 
 import org.apache.commons.codec.digest.DigestUtils;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
index 391e798e285..8c643d5cd65 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeaccessionDatasetVersionCommand.java
@@ -16,8 +16,8 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.IOException;
@@ -106,15 +106,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         DatasetVersion version = (DatasetVersion) r;
         Dataset dataset = version.getDataset();
 
-        try {
-            ctxt.index().indexDataset(dataset, true);
-            Future<String> indexString = ctxt.index().indexDataset(dataset, true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post-publication indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dataset, true);
         return retVal;
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java
new file mode 100644
index 00000000000..c0f863686da
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteCollectionQuotaCommand.java
@@ -0,0 +1,53 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.storageuse.StorageQuota;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author landreev
+ *
+ * A superuser-only command:
+ */
+@RequiredPermissions({})
+public class DeleteCollectionQuotaCommand  extends AbstractVoidCommand {
+
+    private static final Logger logger = Logger.getLogger(DeleteCollectionQuotaCommand.class.getCanonicalName());
+    
+    private final Dataverse targetDataverse;
+    
+    public DeleteCollectionQuotaCommand(DataverseRequest aRequest, Dataverse target) {
+        super(aRequest, target);
+        targetDataverse = target;
+    } 
+        
+    @Override
+    public void executeImpl(CommandContext ctxt) throws CommandException {
+        // first check if the user is a superuser
+        if ( (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser() ) ) {      
+            throw new PermissionException(BundleUtil.getStringFromBundle("dataverse.storage.quota.superusersonly"),
+                this,  null, targetDataverse);                
+        }
+        
+        if (targetDataverse == null) {
+            throw new IllegalCommandException("The target collection must be specified", this);
+        }
+        
+        StorageQuota storageQuota = targetDataverse.getStorageQuota();
+        
+        if (storageQuota != null && storageQuota.getAllocation() != null) {
+            ctxt.dataverses().disableStorageQuota(storageQuota);
+        } 
+        // ... if no quota was enabled on the collection, there is nothing to do - the command succeeds
+    }    
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java
index 92d5064a4ac..e2730ec06d3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDataFileCommand.java
@@ -204,7 +204,7 @@ public FileVisitResult postVisitDirectory(final Path dir, final IOException e)
         }
         GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
         try {
-            if (idServiceBean.alreadyExists(doomed)) {
+            if (idServiceBean.alreadyRegistered(doomed)) {
                 idServiceBean.deleteIdentifier(doomed);
             }
         } catch (Exception e) {
@@ -235,6 +235,20 @@ public String describe() {
 
     @Override
     public boolean onSuccess(CommandContext ctxt, Object r) {
+        // Adjust the storage use for the parent containers: 
+        if (!doomed.isHarvested()) {
+            long storedSize = doomed.getFilesize();
+            // ingested tabular data files also have saved originals that 
+            // are counted as "storage use"
+            Long savedOriginalSize = doomed.getOriginalFileSize(); 
+            if (savedOriginalSize != null) {
+                // Note that DataFile.getFilesize() can return -1 (for "unknown"):
+                storedSize = storedSize > 0 ? storedSize + savedOriginalSize : savedOriginalSize; 
+            }
+            if (storedSize > 0) {
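+                // incrementing by a negative value decrements the recorded storage use for the parent dataset and its containers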
+                ctxt.storageUse().incrementStorageSizeRecursively(doomed.getOwner().getId(), (0L - storedSize));
+            }
+        }
         /**
          * We *could* re-index the entire dataset but it's more efficient to
          * target individual files for deletion, which should always be drafts.
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
index aada2663bf6..f21a2782609 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetLinkingDataverseCommand.java
@@ -66,15 +66,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         Dataset dataset = (Dataset) r;
 
         if (index) {
-            try {
-                ctxt.index().indexDataset(dataset, true);
-            } catch (IOException | SolrServerException e) {
-                String failureLogText = "Post delete linked dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-                failureLogText += "\r\n" + e.getLocalizedMessage();
-                LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-                retVal = false;
-            }
-
+            ctxt.index().asyncIndexDataset(dataset, true);
         }
 
         return retVal;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java
index 3f63c3c6d27..a67d7008ef8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DeleteDatasetVersionCommand.java
@@ -96,13 +96,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
                     }
                 }
                 boolean doNormalSolrDocCleanUp = true;
-                try {
-                    ctxt.index().indexDataset(doomed, doNormalSolrDocCleanUp);
-                } catch (IOException | SolrServerException e) {
-                    String failureLogText = "Post delete version indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + doomed.getId().toString();
-                    failureLogText += "\r\n" + e.getLocalizedMessage();
-                    LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, doomed);
-                }
+                ctxt.index().asyncIndexDataset(doomed, doNormalSolrDocCleanUp);
 
                 return;
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
index 5da7c6bfec5..41093444360 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DestroyDatasetCommand.java
@@ -101,7 +101,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
         if (!doomed.isHarvested()) {
             GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(ctxt);
             try {
-                if (idServiceBean.alreadyExists(doomed)) {
+                if (idServiceBean.alreadyRegistered(doomed)) {
                     idServiceBean.deleteIdentifier(doomed);
                     for (DataFile df : doomed.getFiles()) {
                         idServiceBean.deleteIdentifier(df);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
index 2ca73af3b3c..d6d7b49d172 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/DuraCloudSubmitToArchiveCommand.java
@@ -21,8 +21,8 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 
 import org.apache.commons.codec.binary.Hex;
 import org.duracloud.client.ContentStore;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
index 12bb3fb6a0a..89cfc732455 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java
@@ -10,6 +10,7 @@
 import edu.harvard.iq.dataverse.DatasetVersionUser;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.Embargo;
 import edu.harvard.iq.dataverse.UserNotification;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -31,15 +32,13 @@
 import java.util.logging.Logger;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
 import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.util.ArrayList;
 import java.util.concurrent.Future;
 import org.apache.solr.client.solrj.SolrServerException;
 
-import javax.ejb.EJB;
-import javax.inject.Inject;
-
 
 /**
  *
@@ -117,9 +116,37 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         // is this the first publication of the dataset?
         if (theDataset.getPublicationDate() == null) {
             theDataset.setReleaseUser((AuthenticatedUser) getUser());
-        }
-        if ( theDataset.getPublicationDate() == null ) {
+        
             theDataset.setPublicationDate(new Timestamp(new Date().getTime()));
+            
+            // if there are any embargoed files in this version, we will save 
+            // the latest availability date as the "embargoCitationDate" for future 
+            // reference (if the files are not yet available as of the publication 
+            // of the dataset, this date will be used as the "citation date" of the 
+            // dataset, instead of the publicationDate, in compliance with the DataCite 
+            // best practices). 
+            // The code below replicates the logic that used to be in the method 
+            // Dataset.getCitationDate(), which calculated this adjusted date in real time.
+            
+            Timestamp latestEmbargoDate = null; 
+            for (DataFile dataFile : theDataset.getFiles()) {
+                // this is the first version of the dataset that is being published. 
+                // therefore we can iterate through .getFiles() instead of obtaining
+                // the DataFiles by going through the FileMetadatas in the current version.
+                Embargo embargo = dataFile.getEmbargo();
+                if (embargo != null) {
+                    // "dataAvailable" is not nullable in the Embargo class, no need for a null check
+                    Timestamp embargoDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay());
+                    if (latestEmbargoDate == null || latestEmbargoDate.compareTo(embargoDate) < 0) {
+                        latestEmbargoDate = embargoDate;
+                    }
+                }
+            }
+            // the above loop could be easily replaced with a database query; 
+            // but we iterate through .getFiles() elsewhere in the command, when 
+            // updating and/or registering the files, so it should not result in 
+            // an extra performance hit. 
+            theDataset.setEmbargoCitationDate(latestEmbargoDate);
         } 
 
         //Clear any external status
@@ -238,14 +265,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         } catch (Exception e) {
             logger.warning("Failure to send dataset published messages for : " + dataset.getId() + " : " + e.getMessage());
         }
-        try {
-            Future<String> indexString = ctxt.index().indexDataset(dataset, true);                   
-        } catch (IOException | SolrServerException e) {    
-            String failureLogText = "Post-publication indexing failed. You can kick off a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText,  dataset);
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dataset, true);                   
         
         //re-indexing dataverses that have additional subjects
         if (!dataversesToIndex.isEmpty()){
@@ -272,7 +292,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
             // Just like with indexing, a failure to export is not a fatal
             // condition. We'll just log the error as a warning and keep
             // going:
-            logger.warning("Finalization: exception caught while exporting: "+ex.getMessage());
+            logger.log(Level.WARNING, "Finalization: exception caught while exporting: "+ex.getMessage(), ex);
             // ... but it is important to only update the export time stamp if the 
             // export was indeed successful.
         }        
@@ -328,7 +348,8 @@ private void validateDataFiles(Dataset dataset, CommandContext ctxt) throws Comm
                     // (the decision was made to validate all the files on every
                     // major release; we can revisit the decision if there's any
                     // indication that this makes publishing take significantly longer.
-                    if (maxFileSize == -1 || dataFile.getFilesize() < maxFileSize) {
+                    String driverId = FileUtil.getStorageDriver(dataFile);
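+                    // only validate the checksum if the file is in storage that Dataverse can access directly (and within the size limit, if one is set)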
+                    if (StorageIO.isDataverseAccessible(driverId) && (maxFileSize == -1 || dataFile.getFilesize() < maxFileSize)) {
                         FileUtil.validateDataFileChecksum(dataFile);
                     }
                     else {
@@ -368,12 +389,12 @@ private void publicizeExternalIdentifier(Dataset dataset, CommandContext ctxt) t
         GlobalIdServiceBean idServiceBean = GlobalIdServiceBean.getBean(protocol, ctxt);
  
         if (idServiceBean != null) {
-            List<String> args = idServiceBean.getProviderInformation();
+            
             try {
                 String currentGlobalIdProtocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, "");
                 String currentGlobalAuthority = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Authority, "");
                 String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT");
-                boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabled();
+                boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabledForCollection(getDataset().getOwner());
                 // We will skip trying to register the global identifiers for datafiles 
                 // if "dependent" file-level identifiers are requested, AND the naming 
                 // protocol, or the authority of the dataset global id is different from 
@@ -406,12 +427,12 @@ private void publicizeExternalIdentifier(Dataset dataset, CommandContext ctxt) t
                 dataset.setIdentifierRegistered(true);
             } catch (Throwable e) {
                 logger.warning("Failed to register the identifier "+dataset.getGlobalId().asString()+", or to register a file in the dataset; notifying the user(s), unlocking the dataset");
-
+                
                 // Send failure notification to the user: 
                 notifyUsersDatasetPublishStatus(ctxt, dataset, UserNotification.Type.PUBLISHFAILED_PIDREG);
                 
                 ctxt.datasets().removeDatasetLocks(dataset, DatasetLock.Reason.finalizePublication);
-                throw new CommandException(BundleUtil.getStringFromBundle("dataset.publish.error", args), this);
+                throw new CommandException(BundleUtil.getStringFromBundle("dataset.publish.error", idServiceBean.getProviderInformation()), this);
             }
         }
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java
new file mode 100644
index 00000000000..49f14e7c280
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionQuotaCommand.java
@@ -0,0 +1,51 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author landreev
+ * The command doesn't do much. Its sole purpose is to check the permissions
+ * when it is called by the /api/dataverses/.../storage/quota API. 
+ */
+// @RequiredPermissions - none defined, dynamic
+public class GetCollectionQuotaCommand  extends AbstractCommand<Long> {
+
+    private static final Logger logger = Logger.getLogger(GetCollectionQuotaCommand.class.getCanonicalName());
+    
+    private final Dataverse dataverse;
+    
+    public GetCollectionQuotaCommand(DataverseRequest aRequest, Dataverse target) {
+        super(aRequest, target);
+        dataverse = target;
+    } 
+        
+    @Override
+    public Long execute(CommandContext ctxt) throws CommandException {
+               
+        if (dataverse != null && dataverse.getStorageQuota() != null) {
+            return dataverse.getStorageQuota().getAllocation();
+        }
+        
+        return null;
+    }
+
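+    // dynamic permissions: anyone may view the quota of a released collection; ViewUnpublishedDataverse is required otherwise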
+    @Override
+    public Map<String, Set<Permission>> getRequiredPermissions() {
+        return Collections.singletonMap("",
+                dataverse.isReleased() ? Collections.<Permission>emptySet()
+                : Collections.singleton(Permission.ViewUnpublishedDataverse));
+    }    
+}
+
+    
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java
new file mode 100644
index 00000000000..c30a5a34a81
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetCollectionStorageUseCommand.java
@@ -0,0 +1,45 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author landreev
+ */
+@RequiredPermissions(Permission.ManageDataversePermissions)
+// alternatively, we could make it dynamic - public for published collections
+// and Permission.ViewUnpublishedDataverse required otherwise (?)
+public class GetCollectionStorageUseCommand extends AbstractCommand<Long> {
+
+    private static final Logger logger = Logger.getLogger(GetCollectionStorageUseCommand.class.getCanonicalName());
+    
+    private final Dataverse collection;
+    
+    public GetCollectionStorageUseCommand(DataverseRequest aRequest, Dataverse target) {
+        super(aRequest, target);
+        collection = target;
+    } 
+        
+    @Override
+    public Long execute(CommandContext ctxt) throws CommandException {
+               
+        if (collection == null) {
+            throw new CommandException("null collection passed to get storage use command", this);
+        }
+        return ctxt.storageUse().findStorageSizeByDvContainerId(collection.getId());        
+    }
+
+    /*@Override
+    public Map<String, Set<Permission>> getRequiredPermissions() {
+        return Collections.singletonMap("",
+                dataverse.isReleased() ? Collections.<Permission>emptySet()
+                : Collections.singleton(Permission.ViewUnpublishedDataverse));
+    }*/   
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetSchemaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetSchemaCommand.java
new file mode 100644
index 00000000000..2d5e1251614
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetSchemaCommand.java
@@ -0,0 +1,38 @@
+
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+
+
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author stephenkraffmiller
+ */
+@RequiredPermissions(Permission.AddDataset)
+public class GetDatasetSchemaCommand extends AbstractCommand<String> {
+    
+    private static final Logger logger = Logger.getLogger(GetDatasetSchemaCommand.class.getCanonicalName());
+    
+    private final Dataverse dataverse;
+    
+    public GetDatasetSchemaCommand(DataverseRequest aRequest, Dataverse target) {
+        super(aRequest, target);
+        dataverse = target;
+    }
+
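+    // delegates to the dataverse service to produce the JSON Schema for datasets in this collection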
+    @Override
+    public String execute(CommandContext ctxt) throws CommandException {            
+            return ctxt.dataverses().getCollectionDatasetSchema(dataverse.getAlias());                   
+    }
+    
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java
index f1f27fdcee2..09b33c4efc4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java
@@ -7,7 +7,6 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -15,6 +14,7 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.Map;
@@ -38,7 +38,7 @@ public class GetDatasetStorageSizeCommand extends AbstractCommand<Long> {
     public enum Mode {
 
         STORAGE, DOWNLOAD
-    };
+    }
 
     public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target) {
         super(aRequest, target);
@@ -58,21 +58,20 @@ public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, b
 
     @Override
     public Long execute(CommandContext ctxt) throws CommandException {
-        logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName());
-
         if (dataset == null) {
             // should never happen - must indicate some data corruption in the database
             throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.listing.error"), this);
         }
 
+        logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName());
+
         try {
             return ctxt.datasets().findStorageSize(dataset, countCachedFiles, mode, version);
         } catch (IOException ex) {
             throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.datasize.ioerror"), this);
         }
-
     }
-    
+
     @Override
     public Map<String, Set<Permission>> getRequiredPermissions() {
         // for data file check permission on owning dataset
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java
index 680a5c3aaef..1454a4b1fdd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java
@@ -17,29 +17,30 @@
 
 /**
  * Get the latest version of a dataset a user can view.
+ *
  * @author Naomi
  */
 // No permission needed to view published dvObjects
 @RequiredPermissions({})
-public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand<DatasetVersion>{
+public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand<DatasetVersion> {
     private final Dataset ds;
+    private final boolean includeDeaccessioned;
 
     public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) {
+        this(aRequest, anAffectedDataset, false);
+    }
+
+    public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) {
         super(aRequest, anAffectedDataset);
         ds = anAffectedDataset;
+        this.includeDeaccessioned = includeDeaccessioned;
     }
 
     @Override
     public DatasetVersion execute(CommandContext ctxt) throws CommandException {
-
         if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) {
             return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds));
         }
-
-        return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds));
-
+        return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned));
     }
-    
-    
-    
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java
index 18adff2e55c..4e4252fd155 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java
@@ -2,6 +2,7 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -9,26 +10,31 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 
 /**
- *
  * @author Naomi
  */
 // No permission needed to view published dvObjects
 @RequiredPermissions({})
-public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion>{
+public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion> {
     private final Dataset ds;
-    
+    private final boolean includeDeaccessioned;
+
     public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) {
+        this(aRequest, anAffectedDataset, false);
+    }
+
+    public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) {
         super(aRequest, anAffectedDataset);
         ds = anAffectedDataset;
+        this.includeDeaccessioned = includeDeaccessioned;
     }
 
     @Override
     public DatasetVersion execute(CommandContext ctxt) throws CommandException {
-        for (DatasetVersion dsv: ds.getVersions()) {
-            if (dsv.isReleased()) {
+        for (DatasetVersion dsv : ds.getVersions()) {
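+            // a deaccessioned version is only returned when explicitly requested and the caller can edit the dataset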
+            if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) {
                 return dsv;
-                }
             }
-        return null;
         }
-    }
\ No newline at end of file
+        return null;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java
index 23f08aadd3e..b98cd70a4da 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetProvJsonCommand.java
@@ -9,12 +9,12 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.JsonObject;
 
 @RequiredPermissions(Permission.EditDataset)
 public class GetProvJsonCommand extends AbstractCommand<JsonObject> {
@@ -35,13 +35,13 @@ public JsonObject execute(CommandContext ctxt) throws CommandException {
 
         try {
             StorageIO<DataFile> dataAccess = dataFile.getStorageIO();
-            InputStream inputStream = dataAccess.getAuxFileAsInputStream(provJsonExtension);
-            JsonObject jsonObject = null;
-            if(null != inputStream) {
-                JsonReader jsonReader = Json.createReader(inputStream);
-                jsonObject = jsonReader.readObject();
+            try (InputStream inputStream = dataAccess.getAuxFileAsInputStream(provJsonExtension)) {
+                JsonObject jsonObject = null;
+                if (null != inputStream) {
+                    jsonObject = JsonUtil.getJsonObject(inputStream);
+                }
+                return jsonObject;
             }
-            return jsonObject;
         } catch (IOException ex) {
             String error = "Exception caught in DataAccess.getStorageIO(dataFile) getting file. Error: " + ex;
             throw new IllegalCommandException(error, this);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java
index 3efb38e4a91..a87eb8a99a5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java
@@ -8,6 +8,7 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
@@ -15,27 +16,32 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 
 /**
- *
  * @author Naomi
  */
 // No permission needed to view published dvObjects
 @RequiredPermissions({})
-public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion>{
+public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion> {
     private final Dataset ds;
     private final long majorVersion;
     private final long minorVersion;
-    
+    private boolean includeDeaccessioned;
+
     public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum) {
+        this(aRequest, anAffectedDataset, majorVersionNum, minorVersionNum, false);
+    }
+
+    public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum, boolean includeDeaccessioned) {
         super(aRequest, anAffectedDataset);
         ds = anAffectedDataset;
         majorVersion = majorVersionNum;
         minorVersion = minorVersionNum;
+        this.includeDeaccessioned = includeDeaccessioned;
     }
 
     @Override
     public DatasetVersion execute(CommandContext ctxt) throws CommandException {
-        for (DatasetVersion dsv: ds.getVersions()) {
-            if (dsv.isReleased()) {
+        for (DatasetVersion dsv : ds.getVersions()) {
+            if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) {
                 if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) {
                     return dsv;
                 }
@@ -43,5 +49,4 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException {
         }
         return null;
     }
-    
-}
\ No newline at end of file
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java
index f3324ba6f2e..df0b5d785e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetUserTracesCommand.java
@@ -17,14 +17,14 @@
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
-import java.math.BigDecimal;
+
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 // Superuser-only enforced below.
 @RequiredPermissions({})
@@ -212,7 +212,7 @@ public JsonObjectBuilder execute(CommandContext ctxt) throws CommandException {
                         try {
                             JsonObjectBuilder gbe = Json.createObjectBuilder()
                                     .add("id", guestbookResponse.getId())
-                                    .add("downloadType", guestbookResponse.getDownloadtype())
+                                    .add("eventType", guestbookResponse.getEventType())
                                     .add("filename", guestbookResponse.getDataFile().getCurrentName())
                                     .add("date", guestbookResponse.getResponseDate())
                                     .add("guestbookName", guestbookResponse.getGuestbook().getName());
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
index da2701a41e7..512987866d4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GoogleCloudSubmitToArchiveCommand.java
@@ -19,8 +19,8 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
 import org.apache.commons.codec.binary.Hex;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java
index 807472cda08..478272950bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportDatasetCommand.java
@@ -58,8 +58,8 @@ protected void additionalParameterTests(CommandContext ctxt) throws CommandExcep
             throw new IllegalCommandException("Imported datasets must have a persistent global identifier.", this);
         }
         
-        if ( ! ctxt.datasets().isIdentifierLocallyUnique(ds) ) {
-            throw new IllegalCommandException("Persistent identifier " + ds.getGlobalIdString() + " already exists in this Dataverse installation.", this);
+        if ( ! ctxt.dvObjects().isGlobalIdLocallyUnique(ds.getGlobalId()) ) {
+            throw new IllegalCommandException("Persistent identifier " + ds.getGlobalId().asString() + " already exists in this Dataverse installation.", this);
         }
         
         String pid = ds.getPersistentURL();
@@ -79,15 +79,10 @@ protected void additionalParameterTests(CommandContext ctxt) throws CommandExcep
                  * that exist (and accessible in the PID provider account configured in
                  * Dataverse) but aren't findable to be used. That could be the case if, for
                  * example, someone was importing a draft dataset from elsewhere.
-                 * 
-                 * Also note that just replacing the call above with the alreadyExists() call
-                 * here would break import cases where a DOI is public but not managable with
-                 * the currently configured PID provider credentials. If this is not a valid use
-                 * case, the GET above could be removed.
                  */
                 GlobalIdServiceBean globalIdServiceBean = GlobalIdServiceBean.getBean(ds.getProtocol(), ctxt);
                 if (globalIdServiceBean != null) {
-                    if (globalIdServiceBean.alreadyExists(ds)) {
+                    if (globalIdServiceBean.alreadyRegistered(ds.getGlobalId(), true)) {
                         return;
                     }
                 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java
index 5f31ea756eb..9a75f437b66 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ImportFromFileSystemCommand.java
@@ -14,12 +14,12 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 
-import javax.batch.operations.JobOperator;
-import javax.batch.operations.JobSecurityException;
-import javax.batch.operations.JobStartException;
-import javax.batch.runtime.BatchRuntime;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.batch.operations.JobOperator;
+import jakarta.batch.operations.JobSecurityException;
+import jakarta.batch.operations.JobStartException;
+import jakarta.batch.runtime.BatchRuntime;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 import java.io.File;
 import java.util.Properties;
 import java.util.logging.Level;
@@ -83,7 +83,7 @@ public JsonObject execute(CommandContext ctxt) throws CommandException {
             //       We probably want package files to be able to use specific stores instead.
             //       More importantly perhaps, the approach above does not take into account
             //       if the dataset may have an AlternativePersistentIdentifier, that may be
-            //       designated isStorageLocationDesignator() - i.e., if a different identifer
+            //       designated isStorageLocationDesignator() - i.e., if a different identifier
             //       needs to be used to name the storage directory, instead of the main/current
             //       persistent identifier above.
             if (!isValidDirectory(directory)) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java
index da70529e76d..aef749d7e26 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDatasetCommand.java
@@ -69,14 +69,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         boolean retVal = true;
         DatasetLinkingDataverse dld = (DatasetLinkingDataverse) r;
 
-        try {
-            ctxt.index().indexDataset(dld.getDataset(), true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post link dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dld.getDataset().getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dld.getDataset());
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dld.getDataset(), true);
 
         return retVal;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
index 1c63a1a3c4f..55fe96556a5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LinkDataverseCommand.java
@@ -23,8 +23,8 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
-import java.util.concurrent.Future;
-import javax.ws.rs.core.Response;
+
+import jakarta.ws.rs.core.Response;
 import org.apache.solr.client.solrj.SolrServerException;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
index da9e5adf247..d82b2e7a81d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListRolesCommand.java
@@ -27,7 +27,7 @@ public ListRolesCommand(DataverseRequest aRequest, Dataverse aDefinitionPoint) {
 
     @Override
     public Set<DataverseRole> execute(CommandContext ctxt) throws CommandException {
-        return definitionPoint.getRoles();
+        return ctxt.roles().availableRoles(definitionPoint.getId());
     }
 
     
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java
index 51283f29156..b93833ffdf9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java
@@ -14,6 +14,7 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException;
 import java.util.LinkedList;
 import java.util.List;
 
@@ -23,23 +24,57 @@
  */
 // No permission needed to view published dvObjects
 @RequiredPermissions({})
-public class ListVersionsCommand extends AbstractCommand<List<DatasetVersion>>{
-    
+public class ListVersionsCommand extends AbstractCommand<List<DatasetVersion>> {
+
     private final Dataset ds;
+    private final Integer limit; 
+    private final Integer offset;
+    private final Boolean deepLookup; 
     
-	public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) {
-		super(aRequest, aDataset);
-		ds = aDataset;
-	}
+    public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) {
+        this(aRequest, aDataset, null, null);
+    }
+    
+    public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit) {
+        this(aRequest, aDataset, offset, limit, false);
+    }
+
+    public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit, boolean deepLookup) {
+        super(aRequest, aDataset);
+        ds = aDataset;
+        this.offset = offset; 
+        this.limit = limit; 
+        this.deepLookup = deepLookup; 
+    }
 
-	@Override
-	public List<DatasetVersion> execute(CommandContext ctxt) throws CommandException {
-		List<DatasetVersion> outputList = new LinkedList<>();
-		for ( DatasetVersion dsv : ds.getVersions() ) {
-            if (dsv.isReleased() || ctxt.permissions().request( getRequest() ).on(ds).has(Permission.EditDataset)) {
-                outputList.add(dsv);
+    @Override
+    public List<DatasetVersion> execute(CommandContext ctxt) throws CommandException {
+        
+        boolean includeUnpublished = ctxt.permissions().request(getRequest()).on(ds).has(Permission.EditDataset);
+        
+        if (offset == null && limit == null) { 
+            
+            List<DatasetVersion> outputList = new LinkedList<>();
+            for (DatasetVersion dsv : ds.getVersions()) {
+                if (dsv.isReleased() || includeUnpublished) {
+                    if (deepLookup) {
+                        // @todo: when "deep"/extended lookup is requested, and 
+                        // we call .findDeep() to look up each version again, 
+                        // there is probably a more economical way to obtain the 
+                        // numeric ids of the versions, by a direct single query,
+                        // rather than go through ds.getVersions() like we are now. 
+                        dsv = ctxt.datasetVersion().findDeep(dsv.getId());
+                        if (dsv == null) {
+                            throw new CommandExecutionException("Failed to look up full list of dataset versions", this);
+                        }
+                    }
+                    outputList.add(dsv);
+                }
             }
-		}
-        return outputList;
-	}
+            return outputList;
+        } else {
+            // Only a partial list (one "page"-worth) of versions is being requested
+            return ctxt.datasetVersion().findVersions(ds.getId(), offset, limit, includeUnpublished);
+        }
+    }
 }
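
For reference, a minimal usage sketch of the new constructors (hedged: `engine`, `req`, and `dataset` are placeholders for a command engine, a DataverseRequest, and a Dataset available to the caller; only the constructor signatures come from the change above):

    // One "page" of versions, offset 0, limit 10, without the extended ("deep") lookup:
    List<DatasetVersion> page = engine.submit(new ListVersionsCommand(req, dataset, 0, 10));

    // Full listing, with each version re-fetched via findDeep():
    List<DatasetVersion> all = engine.submit(new ListVersionsCommand(req, dataset, null, null, true));
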
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
index c7e91b2967b..d2f061b6e70 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/LocalSubmitToArchiveCommand.java
@@ -17,8 +17,8 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 
 import java.io.File;
 import java.io.FileOutputStream;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java
index 55d02362e88..94bcfa2f5b7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommand.java
@@ -141,15 +141,8 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
         moved.setOwner(destination);
         ctxt.em().merge(moved);
 
-        try {
-            boolean doNormalSolrDocCleanUp = true;
-            ctxt.index().indexDataset(moved, doNormalSolrDocCleanUp);
-
-        } catch (Exception e) { // RuntimeException e ) {
-            logger.log(Level.WARNING, "Exception while indexing:" + e.getMessage()); //, e);
-            throw new CommandException(BundleUtil.getStringFromBundle("dashboard.card.datamove.dataset.command.error.indexingProblem"), this);
-
-        }
+        boolean doNormalSolrDocCleanUp = true;
+        ctxt.index().asyncIndexDataset(moved, doNormalSolrDocCleanUp);
 
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java
index dcae4e039e6..ea38f5a7af7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommand.java
@@ -302,14 +302,7 @@ public void executeImpl(CommandContext ctxt) throws CommandException {
         if (moved.getDatasetLinkingDataverses() != null && !moved.getDatasetLinkingDataverses().isEmpty()) {
             for (DatasetLinkingDataverse dld : moved.getDatasetLinkingDataverses()) {
                 Dataset linkedDS = ctxt.datasets().find(dld.getDataset().getId());
-                try {
-                    ctxt.index().indexDataset(linkedDS, true);
-                } catch (IOException | SolrServerException e) {
-                    String failureLogText = "Post move dataverse dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + linkedDS.getId().toString();
-                    failureLogText += "\r\n" + e.getLocalizedMessage();
-                    LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, linkedDS);
-
-                }
+                ctxt.index().asyncIndexDataset(linkedDS, true);
 
             }
         }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
index 8a0e9b91066..f5ef121dee2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/PublishDatasetCommand.java
@@ -23,6 +23,7 @@
 import static java.util.stream.Collectors.joining;
 import static edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult.Status;
 import static edu.harvard.iq.dataverse.dataset.DatasetUtil.validateDatasetMetadataExternally;
+import edu.harvard.iq.dataverse.util.StringUtil;
 
 
 /**
@@ -134,7 +135,7 @@ public PublishDatasetResult execute(CommandContext ctxt) throws CommandException
             String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT");
             boolean registerGlobalIdsForFiles = 
                     (currentGlobalIdProtocol.equals(theDataset.getProtocol()) || dataFilePIDFormat.equals("INDEPENDENT")) 
-                    && ctxt.systemConfig().isFilePIDsEnabled();
+                    && ctxt.systemConfig().isFilePIDsEnabledForCollection(theDataset.getOwner());
             
             if ( registerGlobalIdsForFiles ){
                 registerGlobalIdsForFiles = currentGlobalAuthority.equals( theDataset.getAuthority() );
@@ -204,6 +205,12 @@ private void verifyCommandArguments(CommandContext ctxt) throws IllegalCommandEx
             throw new IllegalCommandException("Only authenticated users can release a Dataset. Please authenticate and try again.", this);
         }
         
+        if (getDataset().getLatestVersion().getTermsOfUseAndAccess() == null
+                || (getDataset().getLatestVersion().getTermsOfUseAndAccess().getLicense() == null 
+                && StringUtil.isEmpty(getDataset().getLatestVersion().getTermsOfUseAndAccess().getTermsOfUse()))) {
+            throw new IllegalCommandException("Dataset must have a valid license or Custom Terms Of Use configured before it can be published.", this);
+        }
+        
         if ( (getDataset().isLockedFor(DatasetLock.Reason.Workflow)&&!ctxt.permissions().isMatchingWorkflowLock(getDataset(),request.getUser().getIdentifier(),request.getWFInvocationId())) 
                 || getDataset().isLockedFor(DatasetLock.Reason.Ingest) 
                 || getDataset().isLockedFor(DatasetLock.Reason.finalizePublication)
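
The new pre-publication guard can be read as the following condition (a hedged restatement using only the classes referenced above; `version` stands for the dataset's latest version):

    TermsOfUseAndAccess terms = version.getTermsOfUseAndAccess();
    // Publishable only with a standard license OR non-empty custom Terms of Use:
    boolean publishable = terms != null
            && (terms.getLicense() != null || !StringUtil.isEmpty(terms.getTermsOfUse()));
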
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java
index 286b107a5fd..b9346a43af8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RedetectFileTypeCommand.java
@@ -9,8 +9,8 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.util.EjbUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
@@ -20,7 +20,7 @@
 import java.nio.channels.FileChannel;
 import java.nio.channels.ReadableByteChannel;
 import java.util.logging.Logger;
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 
 @RequiredPermissions(Permission.EditDataset)
 public class RedetectFileTypeCommand extends AbstractCommand<DataFile> {
@@ -83,12 +83,8 @@ public DataFile execute(CommandContext ctxt) throws CommandException {
                 throw new CommandException("Exception while attempting to save the new file type: " + EjbUtil.ejbExceptionToString(ex), this);
             }
             Dataset dataset = fileToRedetect.getOwner();
-            try {
-                boolean doNormalSolrDocCleanUp = true;
-                ctxt.index().indexDataset(dataset, doNormalSolrDocCleanUp);
-            } catch (Exception ex) {
-                logger.info("Exception while reindexing files during file type redetection: " + ex.getLocalizedMessage());
-            }
+            boolean doNormalSolrDocCleanUp = true;
+            ctxt.index().asyncIndexDataset(dataset, doNormalSolrDocCleanUp);
             try {
                 ExportService instance = ExportService.getInstance();
                 instance.exportAllFormats(dataset);
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java
index 9169d6b4fe9..779bc7fb7fe 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RegisterDvObjectCommand.java
@@ -14,6 +14,7 @@
 import java.sql.Timestamp;
 import java.util.Date;
 import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import java.io.IOException;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -57,10 +58,10 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
             //if so, leave.
             if (target.getIdentifier() == null || target.getIdentifier().isEmpty()) {
                 if (target.isInstanceofDataset()) {
-                    target.setIdentifier(ctxt.datasets().generateDatasetIdentifier((Dataset) target, idServiceBean));
+                    target.setIdentifier(idServiceBean.generateDatasetIdentifier((Dataset) target));
 
                 } else {
-                    target.setIdentifier(ctxt.files().generateDataFileIdentifier((DataFile) target, idServiceBean));
+                    target.setIdentifier(idServiceBean.generateDataFileIdentifier((DataFile) target));
                 }
                 if (target.getProtocol() == null) {
                     target.setProtocol(protocol);
@@ -69,7 +70,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
                     target.setAuthority(authority);
                 }
             }
-            if (idServiceBean.alreadyExists(target)) {
+            if (idServiceBean.alreadyRegistered(target)) {
                 return;
             }
             String doiRetString = idServiceBean.createIdentifier(target);
@@ -94,7 +95,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
                     Dataset dataset = (Dataset) target;
                     for (DataFile df : dataset.getFiles()) {
                         if (df.getIdentifier() == null || df.getIdentifier().isEmpty()) {
-                            df.setIdentifier(ctxt.files().generateDataFileIdentifier(df, idServiceBean));
+                            df.setIdentifier(idServiceBean.generateDataFileIdentifier(df));
                             if (df.getProtocol() == null || df.getProtocol().isEmpty()) {
                                 df.setProtocol(protocol);
                             }
@@ -136,22 +137,15 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
             //Only continue if you can successfully migrate the handle
             boolean doNormalSolrDocCleanUp = true;
             Dataset dataset = (Dataset) target;
-            try {
-                ctxt.index().indexDataset(dataset, doNormalSolrDocCleanUp);
-                ctxt.solrIndex().indexPermissionsForOneDvObject( dataset);
-            } catch (IOException | SolrServerException e) {
-                String failureLogText = "Post migrate handle dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-                failureLogText += "\r\n" + e.getLocalizedMessage();
-                LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-
-            }
+            ctxt.index().asyncIndexDataset(dataset, doNormalSolrDocCleanUp);
+            ctxt.solrIndex().indexPermissionsForOneDvObject( dataset);
         }
     }
     
     private Boolean processMigrateHandle (CommandContext ctxt){
         boolean retval = true;
         if(!target.isInstanceofDataset()) return false;
-        if(!target.getProtocol().equals(GlobalId.HDL_PROTOCOL)) return false;
+        if(!target.getProtocol().equals(HandlenetServiceBean.HDL_PROTOCOL)) return false;
         
         AlternativePersistentIdentifier api = new AlternativePersistentIdentifier();
         api.setProtocol(target.getProtocol());
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java
index 2fc3a5c525e..bf291427341 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/RequestAccessCommand.java
@@ -5,7 +5,13 @@
  */
 package edu.harvard.iq.dataverse.engine.command.impl;
 
+import java.util.Arrays;
+import java.util.List;
+import java.util.logging.Logger;
+
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.FileAccessRequest;
+import edu.harvard.iq.dataverse.GuestbookResponse;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
@@ -22,25 +28,39 @@
  */
 @RequiredPermissions({})
 public class RequestAccessCommand extends AbstractCommand<DataFile> {
-    
+
+    private static final Logger logger = Logger.getLogger(RequestAccessCommand.class.getName());
+
     private final DataFile file;
     private final AuthenticatedUser requester;
+    private final FileAccessRequest fileAccessRequest;
     private final Boolean sendNotification;
 
-
     public RequestAccessCommand(DataverseRequest dvRequest, DataFile file) {
         // for data file check permission on owning dataset
-        super(dvRequest, file);        
-        this.file = file;        
+        this(dvRequest, file, false);
+    }
+
+    public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean sendNotification) {
+        // for data file check permission on owning dataset
+        super(dvRequest, file);
+        this.file = file;
         this.requester = (AuthenticatedUser) dvRequest.getUser();
-        this.sendNotification = false;
+        this.fileAccessRequest = new FileAccessRequest(file, requester);
+        this.sendNotification = sendNotification;
+    }
+
+    public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr) {
+        this(dvRequest, file, gbr, false);
     }
-    
-        public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, Boolean sendNotification) {
+
+    public RequestAccessCommand(DataverseRequest dvRequest, DataFile file, GuestbookResponse gbr,
+            Boolean sendNotification) {
         // for data file check permission on owning dataset
-        super(dvRequest, file);        
-        this.file = file;        
+        super(dvRequest, file);
+        this.file = file;
         this.requester = (AuthenticatedUser) dvRequest.getUser();
+        this.fileAccessRequest = new FileAccessRequest(file, requester, gbr);
         this.sendNotification = sendNotification;
     }
 
@@ -50,21 +70,36 @@ public DataFile execute(CommandContext ctxt) throws CommandException {
         if (!file.getOwner().isFileAccessRequest()) {
             throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed"), this);
         }
-        
-        //if user already has permission to download file or the file is public throw command exception
-        if (!file.isRestricted() || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) {
-            throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"), this);
+
+        // if user already has permission to download file or the file is public throw
+        // command exception
+        logger.fine("User: " + this.getRequest().getAuthenticatedUser().getName());
+        logger.fine("File: " + file.getId() + " : restricted?: " + file.isRestricted());
+        logger.fine(
+                "permission?: " + ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile));
+        if (!file.isRestricted()
+                || ctxt.permissions().requestOn(this.getRequest(), file).has(Permission.DownloadFile)) {
+            throw new CommandException(
+                    BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.alreadyHasDownloadPermisssion"),
+                    this);
         }
 
-        if(FileUtil.isActivelyEmbargoed(file)) {
+        if (FileUtil.isActivelyEmbargoed(file)) {
             throw new CommandException(BundleUtil.getStringFromBundle("file.requestAccess.notAllowed.embargoed"), this);
         }
-        file.getFileAccessRequesters().add(requester);
+        file.addFileAccessRequest(fileAccessRequest);
+        List<FileAccessRequest> fars = requester.getFileAccessRequests();
+        if(fars!=null) {
+            fars.add(fileAccessRequest);
+        } else {
+            requester.setFileAccessRequests(Arrays.asList(fileAccessRequest));
+        }
+        DataFile savedFile = ctxt.files().save(file);
         if (sendNotification) {
-            ctxt.fileDownload().sendRequestFileAccessNotification(this.file, requester);
+            logger.fine("ctxt.fileDownload().sendRequestFileAccessNotification(savedFile, requester);");
+            ctxt.fileDownload().sendRequestFileAccessNotification(savedFile.getOwner(), savedFile.getId(), requester);
         }
-        return ctxt.files().save(file);
+        return savedFile;
     }
 
 }
-
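
A hedged usage sketch of the expanded constructors (`engine`, `req`, `file`, and `gbr` are illustrative placeholders for a command engine, request, DataFile, and GuestbookResponse; the constructor shapes are taken from the change above):

    // Record the guestbook response with the access request and send a notification:
    DataFile updated = engine.submit(new RequestAccessCommand(req, file, gbr, true));

    // Without a guestbook response, and without sending a notification:
    DataFile updatedQuietly = engine.submit(new RequestAccessCommand(req, file, false));
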
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java
index ba0348f57d6..caf37ad4de1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommand.java
@@ -72,14 +72,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         boolean retVal = true;
         Dataset dataset = (Dataset) r;
 
-        try {
-            Future<String> indexString = ctxt.index().indexDataset(dataset, true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post return to author indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dataset, true);
 
         return retVal;
     }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
index f24d956e9d7..f02edd54b86 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/S3SubmitToArchiveCommand.java
@@ -20,9 +20,9 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 import org.eclipse.microprofile.config.Config;
 import org.eclipse.microprofile.config.ConfigProvider;
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java
new file mode 100644
index 00000000000..e52c47a5e7d
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCollectionQuotaCommand.java
@@ -0,0 +1,53 @@
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.engine.command.AbstractVoidCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author landreev
+ *
+ * A superuser-only command:
+ */
+@RequiredPermissions({})
+public class SetCollectionQuotaCommand  extends AbstractVoidCommand {
+
+    private static final Logger logger = Logger.getLogger(SetCollectionQuotaCommand.class.getCanonicalName());
+    
+    private final Dataverse dataverse;
+    private final Long allocation; 
+    
+    public SetCollectionQuotaCommand(DataverseRequest aRequest, Dataverse target, Long allocation) {
+        super(aRequest, target);
+        dataverse = target;
+        this.allocation = allocation; 
+    } 
+        
+    @Override
+    public void executeImpl(CommandContext ctxt) throws CommandException {
+        // Check if user is a superuser:
+        if (!(getUser() instanceof AuthenticatedUser) || !getUser().isSuperuser()) {
+            throw new PermissionException(BundleUtil.getStringFromBundle("dataverse.storage.quota.superusersonly"),
+                this,  null, dataverse);                
+        }
+        
+        if (dataverse == null) {
+            throw new IllegalCommandException("Must specify valid collection", this);
+        }
+        
+        if (allocation == null) {
+            throw new IllegalCommandException("Must specify valid allocation in bytes", this);
+        }
+        
+        ctxt.dataverses().saveStorageQuota(dataverse, allocation);
+    }    
+}
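
A hedged usage sketch (superusers only, per the check above; `engine`, `req`, and `collection` are placeholders for the caller's context):

    // Allocate a 10 GB storage quota for the collection, expressed in bytes:
    engine.submit(new SetCollectionQuotaCommand(req, collection, 10L * 1024 * 1024 * 1024));
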
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java
index 72f0ef335fb..557f9dff622 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetCurationStatusCommand.java
@@ -101,14 +101,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         boolean retVal = true;
         Dataset dataset = (Dataset) r;
 
-        try {
-            Future<String> indexString = ctxt.index().indexDataset(dataset, true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post submit for review indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dataset, true);
         return retVal;
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java
index fe14d56562d..2cae9e51896 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SetDatasetCitationDateCommand.java
@@ -49,14 +49,8 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         boolean retVal = true;
         Dataset dataset = (Dataset) r;
 
-        try {
-            Future<String> indexString = ctxt.index().indexDataset(dataset, false);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post set dataset citation date indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dataset, false);
+
         return retVal;
     }	
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java
index 130030798ab..77a4bf5b8ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommand.java
@@ -75,14 +75,8 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         boolean retVal = true;
         Dataset dataset = (Dataset) r;
 
-        try {
-            Future<String> indexString = ctxt.index().indexDataset(dataset, true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post submit for review indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-            retVal = false;
-        }
+        ctxt.index().asyncIndexDataset(dataset, true);
+
         return retVal;
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java
index 29180f65e36..3e85630dd59 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UningestFileCommand.java
@@ -22,11 +22,11 @@
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
 import edu.harvard.iq.dataverse.util.FileUtil;
-import edu.harvard.iq.dataverse.util.StringUtil;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.logging.Logger;
-import javax.persistence.Query;
+import jakarta.persistence.Query;
 
 /**
  *
@@ -105,6 +105,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
         // all the attribute of the file that are stored in the database: 
         
         // the file size: 
+        long archivalFileSize = uningest.getFilesize();
         uningest.setFilesize(storedOriginalFileSize);
         
         // original file format:
@@ -170,8 +171,20 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
             logger.warning("Io Exception deleting all aux objects : " + uningest.getId());
         }
         
+        // Finally, adjust the recorded storage use for the ancestral 
+        // DvObjectContainers (the parent dataset + all the parent collections
+        // up to the root):
+        if (archivalFileSize > 0) {
+            ctxt.storageUse().incrementStorageSizeRecursively(uningest.getOwner().getId(), (0L - archivalFileSize));
+        }
+        
     }
     
+    @Override
+    public boolean onSuccess(CommandContext ctxt, Object r) {
+        
+        return true; 
+    }
     
     private void resetIngestStats(DataFile uningest, CommandContext ctxt){
         
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
index 33f64f23076..7591bebe796 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetVersionCommand.java
@@ -3,25 +3,21 @@
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.DatasetFieldUtil;
 import edu.harvard.iq.dataverse.util.FileMetadataUtil;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.validation.ConstraintViolationException;
-
-import org.apache.solr.client.solrj.SolrServerException;
+import jakarta.validation.ConstraintViolationException;
 
 /**
  *
@@ -30,11 +26,11 @@
 @RequiredPermissions(Permission.EditDataset)
 public class UpdateDatasetVersionCommand extends AbstractDatasetCommand<Dataset> {
 
-    private static final Logger logger = Logger.getLogger(UpdateDatasetVersionCommand.class.getCanonicalName());
+    static final Logger logger = Logger.getLogger(UpdateDatasetVersionCommand.class.getCanonicalName());
     private final List<FileMetadata> filesToDelete;
     private boolean validateLenient = false;
     private final DatasetVersion clone;
-    private final FileMetadata fmVarMet;
+    final FileMetadata fmVarMet;
     
     public UpdateDatasetVersionCommand(Dataset theDataset, DataverseRequest aRequest) {
         super(aRequest, theDataset);
@@ -101,9 +97,26 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
         }
         
         Dataset theDataset = getDataset();        
-        ctxt.permissions().checkEditDatasetLock(theDataset, getRequest(), this);
+        ctxt.permissions().checkUpdateDatasetVersionLock(theDataset, getRequest(), this);
         Dataset savedDataset = null;
         
+        DatasetVersion persistedVersion = clone;
+        /*
+         * Unless a pre-change clone has been provided, we need to get it from the db.
+         * There are two cases: we're updating an existing draft, which has an id and
+         * exists in the database, or we've created a new draft, with a null id, in which
+         * case we need to get the latest version from the db.
+         * 
+         */
+        if(persistedVersion==null) {
+            Long id = getDataset().getLatestVersion().getId();
+            persistedVersion = ctxt.datasetVersion().find(id!=null ? id: getDataset().getLatestVersionForCopy().getId());
+        }
+        
+        //Will throw an IllegalCommandException if a system metadatablock is changed and the appropriate key is not supplied.
+        checkSystemMetadataKeyIfNeeded(getDataset().getOrCreateEditVersion(fmVarMet), persistedVersion);
+        
+        
         try {
             // Invariant: Dataset has no locks preventing the update
             String lockInfoMessage = "saving current edits";
@@ -270,21 +283,12 @@ public Dataset execute(CommandContext ctxt) throws CommandException {
     
     @Override
     public boolean onSuccess(CommandContext ctxt, Object r) {
-
-        boolean retVal = true;
-        Dataset dataset = (Dataset) r;
-
-        try {
-            Future<String> indexString = ctxt.index().indexDataset(dataset, true);
-        } catch (IOException | SolrServerException e) {
-            String failureLogText = "Post update dataset indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
-            failureLogText += "\r\n" + e.getLocalizedMessage();
-            LoggingUtil.writeOnSuccessFailureLog(this, failureLogText, dataset);
-            retVal = false;
-        }
-
-        return retVal;
-
+        // Async indexing significantly improves performance when updating datasets with thousands of files.
+        // Indexing starts immediately, unless an indexing job is already running for this dataset,
+        // in which case the newest version is scheduled for a later indexing pass.
+        // See the documentation of the asyncIndexDataset method for more details.
+        ctxt.index().asyncIndexDataset((Dataset) r, true);
+        return true;
     }
 
 }
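
The same fire-and-forget indexing hand-off now appears throughout these commands; a hedged recap of the pattern (`dataset` is a placeholder for the saved Dataset):

    // Schedules indexing and returns immediately. If an indexing job is already running for
    // this dataset, the newest version is queued for a later pass, so onSuccess() no longer
    // needs the IOException/SolrServerException handling required by the synchronous
    // indexDataset() call it replaces.
    ctxt.index().asyncIndexDataset(dataset, true /* doNormalSolrDocCleanUp */);
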
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
index 57a3394ff77..fe9415f39f9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java
@@ -6,7 +6,7 @@
 import edu.harvard.iq.dataverse.Dataverse.DataverseType;
 import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel;
 import edu.harvard.iq.dataverse.authorization.Permission;
-import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
+
 import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -14,14 +14,11 @@
 import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
-import edu.harvard.iq.dataverse.search.IndexResponse;
-import java.io.IOException;
+
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.Future;
 import java.util.logging.Logger;
-import javax.persistence.TypedQuery;
-import org.apache.solr.client.solrj.SolrServerException;
+import jakarta.persistence.TypedQuery;
 
 /**
  * Update an existing dataverse.
@@ -35,6 +32,8 @@ public class UpdateDataverseCommand extends AbstractCommand<Dataverse> {
 	private final List<DatasetFieldType> facetList;
         private final List<Dataverse> featuredDataverseList;
         private final List<DataverseFieldTypeInputLevel> inputLevelList;
+        
+        private boolean datasetsReindexRequired = false; 
 
 	public UpdateDataverseCommand(Dataverse editedDv, List<DatasetFieldType> facetList, List<Dataverse> featuredDataverseList, 
                     DataverseRequest aRequest,  List<DataverseFieldTypeInputLevel> inputLevelList ) {
@@ -77,9 +76,13 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
                 }
             }
             
-            DataverseType oldDvType = ctxt.dataverses().find(editedDv.getId()).getDataverseType();
-            String oldDvAlias = ctxt.dataverses().find(editedDv.getId()).getAlias();
-            String oldDvName = ctxt.dataverses().find(editedDv.getId()).getName();
+            Dataverse oldDv = ctxt.dataverses().find(editedDv.getId());
+            
+            DataverseType oldDvType = oldDv.getDataverseType();
+            String oldDvAlias = oldDv.getAlias();
+            String oldDvName = oldDv.getName();
+            oldDv = null; 
+            
             Dataverse result = ctxt.dataverses().save(editedDv);
             
             if ( facetList != null ) {
@@ -104,6 +107,14 @@ public Dataverse execute(CommandContext ctxt) throws CommandException {
                 }
             }
             
+            // We don't want to reindex the child datasets unnecessarily:
+            // they only need to be reindexed when one of these values has changed.
+            // This check is not recursive, as all the values just reflect the immediate parent.
+            if (!oldDvType.equals(editedDv.getDataverseType())
+                || !oldDvName.equals(editedDv.getName())
+                || !oldDvAlias.equals(editedDv.getAlias())) {
+                datasetsReindexRequired = true;
+            }
             
             return result;
 	}
@@ -113,12 +124,15 @@ public boolean onSuccess(CommandContext ctxt, Object r) {
         
         // first kick of async index of datasets
         // TODO: is this actually needed? Is there a better way to handle
-        try {
-            Dataverse result = (Dataverse) r;
+        // It appears that at some point we lost some extra logic here, where
+        // we only reindex the underlying datasets if one or more of a specific set
+        // of fields have been changed (since these values are included in the
+        // indexed solr documents for datasets). So I'm putting that back. -L.A.
+        Dataverse result = (Dataverse) r;
+        
+        if (datasetsReindexRequired) {
             List<Dataset> datasets = ctxt.datasets().findByOwnerId(result.getId());
             ctxt.index().asyncIndexDatasetList(datasets, true);
-        } catch (IOException | SolrServerException e) {
-            // these datasets are being indexed asynchrounously, so not sure how to handle errors here
         }
         
         return ctxt.dataverses().index((Dataverse) r);
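
A hedged recap of the new reindex gate in onSuccess(): per the comments above, the collection's type, name, and alias are the values carried into its child datasets' solr documents, so the children are reindexed only when one of those three has changed:

    if (datasetsReindexRequired) {
        // Kick off async reindexing of every dataset owned by the updated collection.
        List<Dataset> datasets = ctxt.datasets().findByOwnerId(result.getId());
        ctxt.index().asyncIndexDatasetList(datasets, true);
    }
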
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java
index add7b825659..9ef9fed4b1b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java
@@ -1,6 +1,7 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.ThemeWidgetFragment;
 import edu.harvard.iq.dataverse.authorization.Permission;
 import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
@@ -22,7 +23,6 @@
 public class UpdateDataverseThemeCommand extends AbstractCommand<Dataverse> {
     private final Dataverse editedDv;
     private final File uploadedFile;
-    private final Path logoPath = Paths.get("../docroot/logos");
     private String locate;
 
     public UpdateDataverseThemeCommand(Dataverse editedDv, File uploadedFile, DataverseRequest aRequest, String location) {
@@ -44,7 +44,7 @@ public UpdateDataverseThemeCommand(Dataverse editedDv, File uploadedFile, Datave
     public Dataverse execute(CommandContext ctxt) throws CommandException {
         // Get current dataverse, so we can delete current logo file if necessary
         Dataverse currentDv = ctxt.dataverses().find(editedDv.getId());
-        File logoFileDir = new File(logoPath.toFile(), editedDv.getId().toString());
+        File logoFileDir = ThemeWidgetFragment.getLogoDir(editedDv.getId().toString()).toFile();
         File currentFile=null;
 
         if (locate.equals("FOOTER")){
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java
index 7e37241563c..7230f9f9c0a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDvObjectPIDMetadataCommand.java
@@ -57,7 +57,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
                 // didn't need updating.
                 String currentGlobalIdProtocol = ctxt.settings().getValueForKey(SettingsServiceBean.Key.Protocol, "");
                 String dataFilePIDFormat = ctxt.settings().getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT");
-                boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabled();
+                boolean isFilePIDsEnabled = ctxt.systemConfig().isFilePIDsEnabledForCollection(target.getOwner());
                 // We will skip trying to update the global identifiers for datafiles if they
                 // aren't being used.
                 // If they are, we need to assure that there's an existing PID or, as when
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java
new file mode 100644
index 00000000000..619740ddd89
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ValidateDatasetJsonCommand.java
@@ -0,0 +1,41 @@
+
+package edu.harvard.iq.dataverse.engine.command.impl;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.authorization.Permission;
+import edu.harvard.iq.dataverse.engine.command.AbstractCommand;
+import edu.harvard.iq.dataverse.engine.command.CommandContext;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
+import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+
+ 
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author stephenkraffmiller
+ */
+@RequiredPermissions(Permission.AddDataset)
+public class ValidateDatasetJsonCommand extends AbstractCommand<String> {
+    
+    private static final Logger logger = Logger.getLogger(ValidateDatasetJsonCommand.class.getCanonicalName());
+    
+    private final Dataverse dataverse;
+    private final String datasetJson;
+    
+    public ValidateDatasetJsonCommand(DataverseRequest aRequest, Dataverse target, String datasetJsonIn) {
+        super(aRequest, target);
+        dataverse = target;
+        datasetJson = datasetJsonIn;
+    }
+
+    @Override
+    public String execute(CommandContext ctxt) throws CommandException {
+
+        return ctxt.dataverses().isDatasetJsonValid(dataverse.getAlias(), datasetJson);
+
+    }
+}
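
A hedged usage sketch (`engine`, `req`, `collection`, and `jsonText` are placeholders; AddDataset permission on the collection is required, per the annotation above). The returned string is whatever isDatasetJsonValid(...) reports for the given collection and JSON:

    String validationResult = engine.submit(new ValidateDatasetJsonCommand(req, collection, jsonText));
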
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
index 7c5fea0f1ec..f82c0d9ad3d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DCTermsExporter.java
@@ -2,12 +2,17 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.dublincore.DublinCoreExportUtil;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
-import javax.json.JsonObject;
+import java.util.Locale;
+import java.util.Optional;
+
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 /**
@@ -15,34 +20,30 @@
  * @author Leonid Andreev
  */
 @AutoService(Exporter.class)
-public class DCTermsExporter implements Exporter {
+public class DCTermsExporter implements XMLExporter {
     
     
     
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return "dcterms";
     }
 
     @Override
-    public String getDisplayName() {
-        return  BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") : "Dublin Core (DCTERMS)";
+    public String getDisplayName(Locale locale) {
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore", locale);
+        return Optional.ofNullable(displayName).orElse("Dublin Core (DCTERMS)");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            DublinCoreExportUtil.datasetJson2dublincore(json, outputStream, DublinCoreExportUtil.DC_FLAVOR_DCTERMS);
+            DublinCoreExportUtil.datasetJson2dublincore(dataProvider.getDatasetJson(), outputStream, DublinCoreExportUtil.DC_FLAVOR_DCTERMS);
         } catch (XMLStreamException xse) {
-            throw new ExportException("Caught XMLStreamException performing DCTERMS export");
+            throw new ExportException("Caught XMLStreamException performing DCTERMS export", xse);
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return true;
-    }
-    
     @Override
     public Boolean isHarvestable() {
         return false;
@@ -54,22 +55,18 @@ public Boolean isAvailableToUsers() {
     }
     
     @Override
-    public String getXMLNameSpace() throws ExportException {
+    public String getXMLNameSpace() {
         return DublinCoreExportUtil.DCTERMS_XML_NAMESPACE;   
     }
     
     @Override
-    public String getXMLSchemaLocation() throws ExportException {
+    public String getXMLSchemaLocation() {
         return DublinCoreExportUtil.DCTERMS_XML_SCHEMALOCATION;
     }
     
     @Override
-    public String getXMLSchemaVersion() throws ExportException {
+    public String getXMLSchemaVersion() {
         return DublinCoreExportUtil.DEFAULT_XML_VERSION;
     }
-    
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter doesn't need/doesn't currently take any parameters
-    }
+
 }
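
The same migration pattern, from the internal Exporter interface to the io.gdcc.spi.export SPI, repeats in the exporters below. As a hedged reference, a minimal XML exporter under the new SPI looks roughly like this, using only the methods exercised in the changes above; the class name, format name, and namespace values are illustrative, not part of this change:

    @AutoService(Exporter.class)
    public class ExampleXmlExporter implements XMLExporter {
        @Override public String getFormatName() { return "example"; }
        @Override public String getDisplayName(Locale locale) { return "Example"; }
        @Override public void exportDataset(ExportDataProvider dataProvider, OutputStream out)
                throws ExportException {
            // Build the XML from dataProvider.getDatasetJson() and write it to `out`.
        }
        @Override public Boolean isHarvestable() { return false; }
        @Override public Boolean isAvailableToUsers() { return true; }
        @Override public String getXMLNameSpace() { return "https://example.org/ns"; }
        @Override public String getXMLSchemaLocation() { return "https://example.org/example.xsd"; }
        @Override public String getXMLSchemaVersion() { return "1.0"; }
    }
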
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
index 59ff539af37..5119b4b96c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExportServiceBean.java
@@ -33,14 +33,14 @@
 import java.util.logging.Logger;
 import java.util.logging.Level;
 import java.io.OutputStream;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import javax.xml.stream.XMLStreamWriter;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLOutputFactory;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
index bb325226fad..d48ce3a537d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DDIExporter.java
@@ -2,91 +2,85 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
-import javax.json.JsonObject;
+import java.util.Locale;
+import java.util.Optional;
+
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
 import javax.xml.stream.XMLOutputFactory;
 
 /**
- * This exporter is for the "full" DDI, that includes the file-level,
- * <data> and <var> metadata.
+ * This exporter is for the "full" DDI, that includes the file-level, <data> and
+ * <var> metadata.
  *
- * @author Leonid Andreev
- * (based on the original DDIExporter by
- * @author skraffmi
- * - renamed OAI_DDIExporter)
+ * @author Leonid Andreev (based on the original DDIExporter by
+ * @author skraffmi - renamed OAI_DDIExporter)
  */
 @AutoService(Exporter.class)
-public class DDIExporter implements Exporter {
+public class DDIExporter implements XMLExporter {
     public static String DEFAULT_XML_NAMESPACE = "ddi:codebook:2_5";
     public static String DEFAULT_XML_SCHEMALOCATION = "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd";
     public static String DEFAULT_XML_VERSION = "2.5";
     public static final String PROVIDER_NAME = "ddi";
-    
+
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return PROVIDER_NAME;
     }
 
     @Override
-    public String getDisplayName() {
-        return  BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") : "DDI";
+    public String getDisplayName(Locale locale) {
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi", locale);
+        return Optional.ofNullable(displayName).orElse("DDI");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-        XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream);
-        xmlw.writeStartDocument();
-        xmlw.flush();
-            DdiExportUtil.datasetJson2ddi(json, version, outputStream);
+            XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(outputStream);
+            xmlw.writeStartDocument();
+            xmlw.flush();
+            DdiExportUtil.datasetJson2ddi(dataProvider.getDatasetJson(), dataProvider.getDatasetFileDetails(),
+                    outputStream);
         } catch (XMLStreamException xse) {
-            throw new ExportException ("Caught XMLStreamException performing DDI export");
+            throw new ExportException("Caught XMLStreamException performing DDI export", xse);
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return true; 
-    }
-    
     @Override
     public Boolean isHarvestable() {
         // No, we don't want this format to be harvested!
-        // For datasets with tabular data the <data> portions of the DDIs 
-        // become huge and expensive to parse; even as they don't contain any 
+        // For datasets with tabular data the <data> portions of the DDIs
+        // become huge and expensive to parse; even as they don't contain any
         // metadata useful to remote harvesters. -- L.A. 4.5
         return false;
     }
-    
+
     @Override
     public Boolean isAvailableToUsers() {
         return true;
     }
-    
+
     @Override
-    public String getXMLNameSpace() throws ExportException {
-        return DDIExporter.DEFAULT_XML_NAMESPACE;   
+    public String getXMLNameSpace() {
+        return DDIExporter.DEFAULT_XML_NAMESPACE;
     }
-    
+
     @Override
-    public String getXMLSchemaLocation() throws ExportException {
+    public String getXMLSchemaLocation() {
         return DDIExporter.DEFAULT_XML_SCHEMALOCATION;
     }
-    
+
     @Override
-    public String getXMLSchemaVersion() throws ExportException {
+    public String getXMLSchemaVersion() {
         return DDIExporter.DEFAULT_XML_VERSION;
     }
-    
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter does not uses or supports any parameters as of now.
-    }
 }
-
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DataCiteExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DataCiteExporter.java
index 7110067296d..8caf32b2df0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DataCiteExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DataCiteExporter.java
@@ -3,61 +3,51 @@
 
 import com.google.auto.service.AutoService;
 
-import edu.harvard.iq.dataverse.DOIDataCiteRegisterService;
-import edu.harvard.iq.dataverse.DataCitation;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.util.Map;
-import javax.json.JsonObject;
+import java.nio.charset.StandardCharsets;
+import java.util.Locale;
+import java.util.Optional;
 
 /**
  *
  * @author qqmyers
  */
 @AutoService(Exporter.class)
-public class DataCiteExporter implements Exporter {
+public class DataCiteExporter implements XMLExporter {
 
     private static String DEFAULT_XML_NAMESPACE = "http://datacite.org/schema/kernel-3";
     private static String DEFAULT_XML_SCHEMALOCATION = "http://datacite.org/schema/kernel-3 http://schema.datacite.org/meta/kernel-3/metadata.xsd";
     private static String DEFAULT_XML_VERSION = "3.0";
 
     public static final String NAME = "Datacite";
+
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return NAME;
     }
 
     @Override
-    public String getDisplayName() {
-        return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.datacite") != null
-                ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.datacite")
-                : "DataCite";
+    public String getDisplayName(Locale locale) {
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.datacite", locale);
+        return Optional.ofNullable(displayName).orElse("DataCite");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream)
-            throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            DataCitation dc = new DataCitation(version);
-            
-            Map<String, String> metadata = dc.getDataCiteMetadata();
-            String xml = DOIDataCiteRegisterService.getMetadataFromDvObject(
-                    version.getDataset().getGlobalId().asString(), metadata, version.getDataset());
-            outputStream.write(xml.getBytes(Charset.forName("utf-8")));
+            String xml = dataProvider.getDataCiteXml();
+            outputStream.write(xml.getBytes(StandardCharsets.UTF_8));
         } catch (IOException e) {
-            throw new ExportException("Caught IOException performing DataCite export");
+            throw new ExportException("Caught IOException performing DataCite export", e);
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return true;
-    }
-
     @Override
     public Boolean isHarvestable() {
         return true;
@@ -69,23 +59,18 @@ public Boolean isAvailableToUsers() {
     }
 
     @Override
-    public String getXMLNameSpace() throws ExportException {
+    public String getXMLNameSpace() {
         return DataCiteExporter.DEFAULT_XML_NAMESPACE;
     }
 
     @Override
-    public String getXMLSchemaLocation() throws ExportException {
+    public String getXMLSchemaLocation() {
         return DataCiteExporter.DEFAULT_XML_SCHEMALOCATION;
     }
 
     @Override
-    public String getXMLSchemaVersion() throws ExportException {
+    public String getXMLSchemaVersion() {
         return DataCiteExporter.DEFAULT_XML_VERSION;
     }
 
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter does not uses or supports any parameters as of now.
-    }
-
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
index 113e669f511..0fa32dd4bfa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/DublinCoreExporter.java
@@ -2,12 +2,17 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.dublincore.DublinCoreExportUtil;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
-import javax.json.JsonObject;
+import java.util.Locale;
+import java.util.Optional;
+
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 /**
@@ -15,61 +20,53 @@
  * @author skraffmi
  */
 @AutoService(Exporter.class)
-public class DublinCoreExporter implements Exporter {
-    
-    
-   
+public class DublinCoreExporter implements XMLExporter {
+
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return "oai_dc";
     }
 
     @Override
-    public String getDisplayName() {
-        return  BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore") : "Dublin Core";
+    public String getDisplayName(Locale locale) {
+        // ToDo: dataset.exportBtn.itemLabel.dublinCore is shared with the
+        // DCTermsExporter
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dublinCore", locale);
+        return Optional.ofNullable(displayName).orElse("Dublin Core");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            DublinCoreExportUtil.datasetJson2dublincore(json, outputStream, DublinCoreExportUtil.DC_FLAVOR_OAI);
+            DublinCoreExportUtil.datasetJson2dublincore(dataProvider.getDatasetJson(), outputStream,
+                    DublinCoreExportUtil.DC_FLAVOR_OAI);
         } catch (XMLStreamException xse) {
-            throw new ExportException("Caught XMLStreamException performing DC export");
+            throw new ExportException("Caught XMLStreamException performing DC export", xse);
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return true;
-    }
-    
     @Override
     public Boolean isHarvestable() {
         return true;
     }
-    
+
     @Override
     public Boolean isAvailableToUsers() {
         return false;
     }
-    
+
     @Override
-    public String getXMLNameSpace() throws ExportException {
-        return DublinCoreExportUtil.OAI_DC_XML_NAMESPACE;   
+    public String getXMLNameSpace() {
+        return DublinCoreExportUtil.OAI_DC_XML_NAMESPACE;
     }
-    
+
     @Override
-    public String getXMLSchemaLocation() throws ExportException {
+    public String getXMLSchemaLocation() {
         return DublinCoreExportUtil.OAI_DC_XML_SCHEMALOCATION;
     }
-    
+
     @Override
-    public String getXMLSchemaVersion() throws ExportException {
+    public String getXMLSchemaVersion() {
         return DublinCoreExportUtil.DEFAULT_XML_VERSION;
     }
-    
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter doesn't need/doesn't currently take any parameters
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportException.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportException.java
deleted file mode 100644
index 0909bf06ca4..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/export/ExportException.java
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package edu.harvard.iq.dataverse.export;
-
-/**
- *
- * @author Leonid Andreev
- */
-public class ExportException extends Exception {
-    public ExportException(String message) {
-        super(message);
-    }
-
-    public ExportException(String message, Throwable cause) {
-        super(message, cause);
-    }
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
index ddc6296093c..8342e7df92a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ExportService.java
@@ -5,14 +5,16 @@
 import edu.harvard.iq.dataverse.Embargo;
 import edu.harvard.iq.dataverse.FileMetadata;
 
-import static edu.harvard.iq.dataverse.GlobalIdServiceBean.logger;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO;
 import edu.harvard.iq.dataverse.dataaccess.DataAccessOption;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.json.JsonPrinter;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
@@ -20,26 +22,31 @@
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.net.URL;
+import java.net.URLClassLoader;
 import java.nio.channels.Channel;
 import java.nio.channels.Channels;
 import java.nio.channels.WritableByteChannel;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.sql.Timestamp;
 import java.time.LocalDate;
 import java.time.ZoneId;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 import java.util.ServiceConfigurationError;
 import java.util.ServiceLoader;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MediaType;
 
 import org.apache.commons.io.IOUtils;
 
@@ -51,9 +58,54 @@ public class ExportService {
 
     private static ExportService service;
     private ServiceLoader<Exporter> loader;
+    private Map<String, Exporter> exporterMap = new HashMap<>();
+
+    private static final Logger logger = Logger.getLogger(ExportService.class.getCanonicalName());
 
     private ExportService() {
-        loader = ServiceLoader.load(Exporter.class);
+        /*
+         * Step 1 - find the EXPORTERS dir and add all jar files there to a class loader
+         */
+        List<URL> jarUrls = new ArrayList<>();
+        Optional<String> exportPathSetting = JvmSettings.EXPORTERS_DIRECTORY.lookupOptional(String.class);
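+        // If the directory setting is absent, only the exporters bundled with the application are loaded.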
+        if (exportPathSetting.isPresent()) {
+            Path exporterDir = Paths.get(exportPathSetting.get());
+            // Get all JAR files from the configured directory
+            try (DirectoryStream<Path> stream = Files.newDirectoryStream(exporterDir, "*.jar")) {
+                // Using the foreach loop here to enable catching the URI/URL exceptions
+                for (Path path : stream) {
+                    logger.log(Level.FINE, "Adding {0}", path.toUri().toURL());
+                    // This is the syntax required to indicate a jar file from which classes should
+                    // be loaded (versus a class file).
+                    jarUrls.add(new URL("jar:" + path.toUri().toURL() + "!/"));
+                }
+            } catch (IOException e) {
+                logger.warning("Problem accessing external Exporters: " + e.getLocalizedMessage());
+            }
+        }
+        URLClassLoader cl = URLClassLoader.newInstance(jarUrls.toArray(new URL[0]), this.getClass().getClassLoader());
+
+        /*
+         * Step 2 - load all Exporters that can be found, using the jars as additional
+         * sources
+         */
+        loader = ServiceLoader.load(Exporter.class, cl);
+        /*
+         * Step 3 - Fill exporterMap with the format name as the key, allowing external
+         * exporters to replace internal ones for the same format name. Note:
+         * ServiceLoader appears to return classes in roughly alphabetical order rather
+         * than by class loader, so internal classes handling a given format name may
+         * be processed before or after external ones.
+         */
+        loader.forEach(exp -> {
+            String formatName = exp.getFormatName();
+            // If there is no entry for this format name yet, or if this is an external exporter
+            if (!exporterMap.containsKey(formatName) || exp.getClass().getClassLoader().equals(cl)) {
+                exporterMap.put(formatName, exp);
+            }
+            logger.log(Level.FINE, "SL: " + exp.getFormatName() + " from " + exp.getClass().getCanonicalName()
+                    + " and classloader: " + exp.getClass().getClassLoader().getClass().getCanonicalName());
+        });
     }
 
     public static synchronized ExportService getInstance() {
@@ -63,23 +115,22 @@ public static synchronized ExportService getInstance() {
         return service;
     }
 
-    public List< String[]> getExportersLabels() {
+    public List<String[]> getExportersLabels() {
         List<String[]> retList = new ArrayList<>();
-        Iterator<Exporter> exporters = ExportService.getInstance().loader.iterator();
-        while (exporters.hasNext()) {
-            Exporter e = exporters.next();
+
+        exporterMap.values().forEach(exp -> {
             String[] temp = new String[2];
-            temp[0] = e.getDisplayName();
-            temp[1] = e.getProviderName();
+            temp[0] = exp.getDisplayName(BundleUtil.getCurrentLocale());
+            temp[1] = exp.getFormatName();
             retList.add(temp);
-        }
+        });
         return retList;
     }
 
     public InputStream getExport(Dataset dataset, String formatName) throws ExportException, IOException {
-        // first we will try to locate an already existing, cached export 
-        // for this format: 
-        
+        // first we will try to locate an already existing, cached export
+        // for this format:
+
         InputStream exportInputStream = getCachedExportFormat(dataset, formatName);
 
         // The DDI export is limited for restricted and actively embargoed files (no
@@ -105,9 +156,9 @@ public InputStream getExport(Dataset dataset, String formatName) throws ExportEx
                     // one check that nextembargoEnd exists and is after the last export and before
                     // now versus scanning through files until we potentially find such an embargo.
                     Embargo e = fm.getDataFile().getEmbargo();
-                    if(e!=null) {
-                    logger.fine("Datafile:  " + fm.getDataFile().getId());
-                    logger.fine("Embargo end date: "+ e.getFormattedDateAvailable());
+                    if (e != null) {
+                        logger.fine("Datafile:  " + fm.getDataFile().getId());
+                        logger.fine("Embargo end date: " + e.getFormattedDateAvailable());
                     }
                     if (e != null && !embargoIds.contains(e.getId()) && e.getDateAvailable().isAfter(exportLocalDate)
                             && e.getDateAvailable().isBefore(LocalDate.now())) {
@@ -117,7 +168,7 @@ public InputStream getExport(Dataset dataset, String formatName) throws ExportEx
                         // it refresh
                         clearCachedExport = true;
                         break;
-                    } else if(e!=null) {
+                    } else if (e != null) {
                         logger.fine("adding embargo to checked list: " + e.getId());
                         embargoIds.add(e.getId());
                     }
@@ -135,7 +186,7 @@ public InputStream getExport(Dataset dataset, String formatName) throws ExportEx
                 }
             }
         }
-        
+
         if (exportInputStream != null) {
             return exportInputStream;
         }
@@ -150,7 +201,7 @@ public InputStream getExport(Dataset dataset, String formatName) throws ExportEx
             return exportInputStream;
         }
 
-        // if there is no cached export still - we have to give up and throw 
+        // if there is no cached export still - we have to give up and throw
         // an exception!
         throw new ExportException("Failed to export the dataset as " + formatName);
 
@@ -175,8 +226,8 @@ public String getExportAsString(Dataset dataset, String formatName) {
                 inputStream.close();
                 return sb.toString();
             }
-        } catch (ExportException | IOException ex) {
-            //ex.printStackTrace();
+        } catch (IOException ex) {
+            logger.log(Level.FINE, ex.getMessage(), ex);
             return null;
         } finally {
             IOUtils.closeQuietly(inp);
@@ -186,9 +237,9 @@ public String getExportAsString(Dataset dataset, String formatName) {
 
     }
 
-    // This method goes through all the Exporters and calls 
-    // the "chacheExport()" method that will save the produced output  
-    // in a file in the dataset directory, on each Exporter available. 
+    // This method goes through all the Exporters and calls
+    // the "chacheExport()" method that will save the produced output
+    // in a file in the dataset directory, on each Exporter available.
     public void exportAllFormats(Dataset dataset) throws ExportException {
         try {
             clearAllCachedFormats(dataset);
@@ -201,146 +252,168 @@ public void exportAllFormats(Dataset dataset) throws ExportException {
             if (releasedVersion == null) {
                 throw new ExportException("No released version for dataset " + dataset.getGlobalId().toString());
             }
-
-            final JsonObjectBuilder datasetAsJsonBuilder = JsonPrinter.jsonAsDatasetDto(releasedVersion);
-            JsonObject datasetAsJson = datasetAsJsonBuilder.build();
-
-            Iterator<Exporter> exporters = loader.iterator();
-            while (exporters.hasNext()) {
-                Exporter e = exporters.next();
-                String formatName = e.getProviderName();
-
-                cacheExport(releasedVersion, formatName, datasetAsJson, e);
-
+            InternalExportDataProvider dataProvider = new InternalExportDataProvider(releasedVersion);
+
+            for (Exporter e : exporterMap.values()) {
+                String formatName = e.getFormatName();
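+                // Some exporters (e.g. the HTML codebook) transform the output of another
+                // format; export that prerequisite first and hand its stream to the data provider.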
+                if (e.getPrerequisiteFormatName().isPresent()) {
+                    String prereqFormatName = e.getPrerequisiteFormatName().get();
+                    try (InputStream preReqStream = getExport(dataset, prereqFormatName)) {
+                        dataProvider.setPrerequisiteInputStream(preReqStream);
+                        cacheExport(dataset, dataProvider, formatName, e);
+                        dataProvider.setPrerequisiteInputStream(null);
+                    } catch (IOException ioe) {
+                        throw new ExportException ("Could not get prerequisite " + e.getPrerequisiteFormatName() + " to create " + formatName + "export for dataset " + dataset.getId(), ioe);
+                    }
+                } else {
+                    cacheExport(dataset, dataProvider, formatName, e);
+                }
             }
-            // Finally, if we have been able to successfully export in all available 
-            // formats, we'll increment the "last exported" time stamp: 
+            // Finally, if we have been able to successfully export in all available
+            // formats, we'll increment the "last exported" time stamp:
             dataset.setLastExportTime(new Timestamp(new Date().getTime()));
-            
+
         } catch (ServiceConfigurationError serviceError) {
             throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
         } catch (RuntimeException e) {
-            //e.printStackTrace();
-            throw new ExportException("Unknown runtime exception exporting metadata. " + (e.getMessage() == null ? "" : e.getMessage()));
+            logger.log(Level.FINE, e.getMessage(), e);
+            throw new ExportException(
+                    "Unknown runtime exception exporting metadata. " + (e.getMessage() == null ? "" : e.getMessage()), e);
         }
 
     }
 
     public void clearAllCachedFormats(Dataset dataset) throws IOException {
         try {
-            Iterator<Exporter> exporters = loader.iterator();
-            while (exporters.hasNext()) {
-                Exporter e = exporters.next();
-                String formatName = e.getProviderName();
 
+            for (Exporter e : exporterMap.values()) {
+                String formatName = e.getFormatName();
                 clearCachedExport(dataset, formatName);
             }
 
             dataset.setLastExportTime(null);
         } catch (IOException ex) {
-            //not fatal
+            // not fatal
         }
     }
 
-    // This method finds the exporter for the format requested, 
+    // This method finds the exporter for the format requested,
     // then produces the dataset metadata as a JsonObject, then calls
-    // the "cacheExport()" method that will save the produced output  
-    // in a file in the dataset directory. 
+    // the "cacheExport()" method that will save the produced output
+    // in a file in the dataset directory.
     public void exportFormat(Dataset dataset, String formatName) throws ExportException {
         try {
-            Iterator<Exporter> exporters = loader.iterator();
-            while (exporters.hasNext()) {
-                Exporter e = exporters.next();
-                if (e.getProviderName().equals(formatName)) {
-                    DatasetVersion releasedVersion = dataset.getReleasedVersion();
-                    if (releasedVersion == null) {
-                        throw new IllegalStateException("No Released Version");
+
+            Exporter e = exporterMap.get(formatName);
+            if (e != null) {
+                DatasetVersion releasedVersion = dataset.getReleasedVersion();
+                if (releasedVersion == null) {
+                    throw new ExportException(
+                            "No published version found during export. " + dataset.getGlobalId().toString());
+                }
+                if (e.getPrerequisiteFormatName().isPresent()) {
+                    String prereqFormatName = e.getPrerequisiteFormatName().get();
+                    try (InputStream preReqStream = getExport(dataset, prereqFormatName)) {
+                        InternalExportDataProvider dataProvider = new InternalExportDataProvider(releasedVersion, preReqStream);
+                        cacheExport(dataset, dataProvider, formatName, e);
+                    } catch (IOException ioe) {
+                        throw new ExportException ("Could not get prerequisite " + e.getPrerequisiteFormatName() + " to create " + formatName + "export for dataset " + dataset.getId(), ioe);
                     }
-                    final JsonObjectBuilder datasetAsJsonBuilder = JsonPrinter.jsonAsDatasetDto(releasedVersion);
-                    cacheExport(releasedVersion, formatName, datasetAsJsonBuilder.build(), e);
+                } else {
+                    InternalExportDataProvider dataProvider = new InternalExportDataProvider(releasedVersion);
+                    cacheExport(dataset, dataProvider, formatName, e);
                 }
+                // As with exportAll, we should update the lastexporttime for the dataset
+                dataset.setLastExportTime(new Timestamp(new Date().getTime()));
+            } else {
+                throw new ExportException("Exporter not found");
             }
-        } catch (ServiceConfigurationError serviceError) {
-            throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
         } catch (IllegalStateException e) {
-            throw new ExportException("No published version found during export. " + dataset.getGlobalId().toString());
+            // IllegalStateException can potentially mean very different, and
+            // unexpected things. An exporter attempting to get a single primitive
+            // value from a fieldDTO that is in fact a Multiple and contains a
+            // json vector (this has happened, for example, when the code in the
+            // DDI exporter was not updated following a metadata fieldtype change),
+            // will result in IllegalStateException.
+            throw new ExportException("IllegalStateException caught when exporting " + formatName + " for dataset "
+                    + dataset.getGlobalId().toString()
+                    + "; may or may not be due to a mismatch between an exporter code and a metadata block update. "
+                    + e.getMessage());
         }
-        
-        //As with exportAll, we should update the lastexporttime for the dataset
-        dataset.setLastExportTime(new Timestamp(new Date().getTime()));
+
     }
-    
 
     public Exporter getExporter(String formatName) throws ExportException {
-        try {
-            Iterator<Exporter> exporters = loader.iterator();
-            while (exporters.hasNext()) {
-                Exporter e = exporters.next();
-                if (e.getProviderName().equals(formatName)) {
-                    return e;
-                }
-            }
-        } catch (ServiceConfigurationError serviceError) {
-            throw new ExportException("Service configuration error during export. " + serviceError.getMessage());
-        } catch (Exception ex) {
-            throw new ExportException("Could not find Exporter \"" + formatName + "\", unknown exception");
+        Exporter e = exporterMap.get(formatName);
+        if (e != null) {
+            return e;
         }
         throw new ExportException("No such Exporter: " + formatName);
     }
 
-    // This method runs the selected metadata exporter, caching the output 
+    // This method runs the selected metadata exporter, caching the output
     // in a file in the dataset directory / container based on its DOI:
-    private void cacheExport(DatasetVersion version, String format, JsonObject datasetAsJson, Exporter exporter) throws ExportException {
-    	boolean tempFileUsed = false;
-    	File tempFile = null;
-    	OutputStream outputStream = null;
-    	Dataset dataset = version.getDataset();
-    	StorageIO<Dataset> storageIO = null;
-    	try {
-    		// With some storage drivers, we can open a WritableChannel, or OutputStream 
-    		// to directly write the generated metadata export that we want to cache; 
-    		// Some drivers (like Swift) do not support that, and will give us an
-    		// "operation not supported" exception. If that's the case, we'll have 
-    		// to save the output into a temp file, and then copy it over to the 
-    		// permanent storage using the IO "save" command: 
-    		try {
-    			storageIO = DataAccess.getStorageIO(dataset);
-    			Channel outputChannel = storageIO.openAuxChannel("export_" + format + ".cached", DataAccessOption.WRITE_ACCESS);
-    			outputStream = Channels.newOutputStream((WritableByteChannel) outputChannel);
-    		} catch (IOException ioex) {
-    			// A common case = an IOException in openAuxChannel which is not supported by S3 stores for WRITE_ACCESS
-    			tempFileUsed = true;
-    			tempFile = File.createTempFile("tempFileToExport", ".tmp");
-    			outputStream = new FileOutputStream(tempFile);
-    		}
-
-    		try {
-    			// Write the metadata export file to the outputStream, which may be the final location or a temp file
-    			exporter.exportDataset(version, datasetAsJson, outputStream);
-    			outputStream.flush();
-    			outputStream.close();
-    			if(tempFileUsed) {                  
-    				logger.fine("Saving export_" + format + ".cached aux file from temp file: " + Paths.get(tempFile.getAbsolutePath()));
-    				storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), "export_" + format + ".cached");
-    				boolean tempFileDeleted = tempFile.delete();
-    				logger.fine("tempFileDeleted: " + tempFileDeleted);
-    			}
-    		} catch (ExportException exex) {
-    			/*This exception is from the particular exporter and may not affect other exporters (versus other exceptions in this method which are from the basic mechanism to create a file)
-    			 * So we'll catch it here and report so that loops over other exporters can continue. 
-    			 * Todo: Might be better to create a new exception subtype and send it upward, but the callers currently just log and ignore beyond terminating any loop over exporters.
-    			 */
-    			logger.warning("Exception thrown while creating export_" + format + ".cached : " + exex.getMessage());
-    		} catch (IOException ioex) {
-    			throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached");
-    		}
-
-    	} catch (IOException ioex) {
-    		//This catches any problem creating a local temp file in the catch clause above
-    		throw new ExportException("IO Exception thrown before exporting as " + "export_" + format + ".cached");
-    	} finally {
-    		IOUtils.closeQuietly(outputStream);
-    	}
+    private void cacheExport(Dataset dataset, InternalExportDataProvider dataProvider, String format, Exporter exporter)
+            throws ExportException {
+        
+        OutputStream outputStream = null;
+        try {
+            boolean tempFileUsed = false;
+            File tempFile = null;
+            StorageIO<Dataset> storageIO = null;
+
+            // With some storage drivers, we can open a WritableChannel, or OutputStream
+            // to directly write the generated metadata export that we want to cache;
+            // Some drivers (like Swift) do not support that, and will give us an
+            // "operation not supported" exception. If that's the case, we'll have
+            // to save the output into a temp file, and then copy it over to the
+            // permanent storage using the IO "save" command:
+            try {
+                storageIO = DataAccess.getStorageIO(dataset);
+                Channel outputChannel = storageIO.openAuxChannel("export_" + format + ".cached",
+                        DataAccessOption.WRITE_ACCESS);
+                outputStream = Channels.newOutputStream((WritableByteChannel) outputChannel);
+            } catch (IOException ioex) {
+                // A common case = an IOException in openAuxChannel which is not supported by S3
+                // stores for WRITE_ACCESS
+                tempFileUsed = true;
+                tempFile = File.createTempFile("tempFileToExport", ".tmp");
+                outputStream = new FileOutputStream(tempFile);
+            }
+
+            try {
+                // Write the metadata export file to the outputStream, which may be the final
+                // location or a temp file
+                exporter.exportDataset(dataProvider, outputStream);
+                outputStream.flush();
+                outputStream.close();
+                if (tempFileUsed) {
+                    logger.fine("Saving export_" + format + ".cached aux file from temp file: "
+                            + Paths.get(tempFile.getAbsolutePath()));
+                    storageIO.savePathAsAux(Paths.get(tempFile.getAbsolutePath()), "export_" + format + ".cached");
+                    boolean tempFileDeleted = tempFile.delete();
+                    logger.fine("tempFileDeleted: " + tempFileDeleted);
+                }
+            } catch (ExportException exex) {
+                /*
+                 * This exception is from the particular exporter and may not affect other
+                 * exporters (versus other exceptions in this method which are from the basic
+                 * mechanism to create a file). So we'll catch it here and report it so that loops
+                 * over other exporters can continue. Todo: Might be better to create a new
+                 * exception subtype and send it upward, but the callers currently just log and
+                 * ignore beyond terminating any loop over exporters.
+                 */
+                logger.warning("Exception thrown while creating export_" + format + ".cached : " + exex.getMessage());
+            } catch (IOException ioex) {
+                throw new ExportException("IO Exception thrown exporting as " + "export_" + format + ".cached");
+            }
+
+        } catch (IOException ioex) {
+            // This catches any problem creating a local temp file in the catch clause above
+            throw new ExportException("IO Exception thrown before exporting as " + "export_" + format + ".cached");
+        } finally {
+            IOUtils.closeQuietly(outputStream);
+        }
 
     }
 
@@ -355,9 +428,9 @@ private void clearCachedExport(Dataset dataset, String format) throws IOExceptio
 
     }
 
-    // This method checks if the metadata has already been exported in this 
-    // format and cached on disk. If it has, it'll open the file and retun 
-    // the file input stream. If not, it'll return null. 
+    // This method checks if the metadata has already been exported in this
+    // format and cached on disk. If it has, it'll open the file and return
+    // the file input stream. If not, it'll return null.
     private InputStream getCachedExportFormat(Dataset dataset, String formatName) throws ExportException, IOException {
 
         StorageIO<Dataset> dataAccess = null;
@@ -379,11 +452,12 @@ private InputStream getCachedExportFormat(Dataset dataset, String formatName) th
 
     }
 
-    /*The below method, getCachedExportSize(), is not currently used.
-     *An exercise for the reader could be to refactor it if it's needed
-     *to be compatible with storage drivers other than local filesystem.
-     *Files.exists() would need to be discarded.
-     * -- L.A. 4.8 */
+    /*
+     * The below method, getCachedExportSize(), is not currently used. An exercise
+     * for the reader could be to refactor it if it needs to be compatible with
+     * storage drivers other than local filesystem. Files.exists() would need to be
+     * discarded. -- L.A. 4.8
+     */
 //    public Long getCachedExportSize(Dataset dataset, String formatName) {
 //        try {
 //            if (dataset.getFileSystemDirectory() != null) {
@@ -399,33 +473,19 @@ private InputStream getCachedExportFormat(Dataset dataset, String formatName) th
 //        return null;
 //    }
     public Boolean isXMLFormat(String provider) {
-        try {
-            Iterator<Exporter> exporters = loader.iterator();
-            while (exporters.hasNext()) {
-                Exporter e = exporters.next();
-                if (e.getProviderName().equals(provider)) {
-                    return e.isXMLFormat();
-                }
-            }
-        } catch (ServiceConfigurationError serviceError) {
-            serviceError.printStackTrace();
+        Exporter e = exporterMap.get(provider);
+        if (e != null) {
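+            // Under the export SPI, being an XML format is signalled by implementing
+            // the XMLExporter interface rather than by the old isXMLFormat() method.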
+            return e instanceof XMLExporter;
         }
         return null;
     }
 
-	public String getMediaType(String provider) {
-		 try {
-	            Iterator<Exporter> exporters = loader.iterator();
-	            while (exporters.hasNext()) {
-	                Exporter e = exporters.next();
-	                if (e.getProviderName().equals(provider)) {
-	                    return e.getMediaType();
-	                }
-	            }
-	        } catch (ServiceConfigurationError serviceError) {
-	            serviceError.printStackTrace();
-	        }
-	        return MediaType.TEXT_PLAIN;
-	}
+    public String getMediaType(String provider) {
+        Exporter e = exporterMap.get(provider);
+        if (e != null) {
+            return e.getMediaType();
+        }
+        return MediaType.TEXT_PLAIN;
+    }
 
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java
index 367ac4bbc5b..9d0b107299e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/HtmlCodeBookExporter.java
@@ -1,55 +1,51 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
 import javax.xml.stream.XMLStreamException;
-import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.file.Path;
-import java.nio.file.Paths;
+import java.util.Locale;
+import java.util.Optional;
 
 @AutoService(Exporter.class)
 public class HtmlCodeBookExporter implements Exporter {
 
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return "html";
     }
 
     @Override
-    public String getDisplayName() {
-        return  BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.html") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.html") : "DDI html codebook";
+    public String getDisplayName(Locale locale) {
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.html", locale);
+        return Optional.ofNullable(displayName).orElse("DDI html codebook");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
-        try {
-            InputStream ddiInputStream;
-            try {
-                ddiInputStream = ExportService.getInstance().getExport(version.getDataset(), "ddi");
-            } catch(ExportException | IOException e) {
-                throw new ExportException ("Cannot open export_ddi cached file");
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
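+        // The prerequisite stream is the cached DDI export, supplied by ExportService
+        // based on getPrerequisiteFormatName() below.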
+        Optional<InputStream> ddiInputStreamOptional = dataProvider.getPrerequisiteInputStream();
+        if (ddiInputStreamOptional.isPresent()) {
+            try (InputStream ddiInputStream = ddiInputStreamOptional.get()) {
+                DdiExportUtil.datasetHtmlDDI(ddiInputStream, outputStream);
+            } catch (IOException e) {
+                throw new ExportException("Cannot open export_ddi cached file");
+            } catch (XMLStreamException xse) {
+                throw new ExportException("Caught XMLStreamException performing DDI export");
             }
-            DdiExportUtil.datasetHtmlDDI(ddiInputStream, outputStream);
-        } catch (XMLStreamException xse) {
-            throw new ExportException ("Caught XMLStreamException performing DDI export");
+        } else {
+            throw new ExportException("No prerequisite input stream found");
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return false;
-    }
-
     @Override
     public Boolean isHarvestable() {
         // No, we don't want this format to be harvested!
@@ -65,23 +61,9 @@ public Boolean isAvailableToUsers() {
     }
 
     @Override
-    public String getXMLNameSpace() throws ExportException {
-        return null;
-    }
-
-    @Override
-    public String getXMLSchemaLocation() throws ExportException {
-        return null;
-    }
-
-    @Override
-    public String getXMLSchemaVersion() throws ExportException {
-        return null;
-    }
-
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter does not uses or supports any parameters as of now.
+    public Optional<String> getPrerequisiteFormatName() {
+        // This exporter relies on being able to get the output of the ddi exporter
+        return Optional.of("ddi");
     }
 
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java
new file mode 100644
index 00000000000..a7967f6ccb6
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/export/InternalExportDataProvider.java
@@ -0,0 +1,93 @@
+package edu.harvard.iq.dataverse.export;
+
+import java.io.InputStream;
+import java.util.Optional;
+
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+
+import edu.harvard.iq.dataverse.DOIDataCiteRegisterService;
+import edu.harvard.iq.dataverse.DataCitation;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.FileMetadata;
+import io.gdcc.spi.export.ExportDataProvider;
+import edu.harvard.iq.dataverse.util.bagit.OREMap;
+import edu.harvard.iq.dataverse.util.json.JsonPrinter;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
+/**
+ * Provides all data necessary to create an export
+ * 
+ */
+public class InternalExportDataProvider implements ExportDataProvider {
+
+    private DatasetVersion dv;
+    private JsonObject jsonRepresentation = null;
+    private JsonObject schemaDotOrgRepresentation = null;
+    private JsonObject oreRepresentation = null;
+    private InputStream is = null;
+
+    InternalExportDataProvider(DatasetVersion dv) {
+        this.dv = dv;
+    }
+    
+    InternalExportDataProvider(DatasetVersion dv, InputStream is) {
+        this.dv = dv;
+        this.is = is;
+    }
+
+    @Override
+    public JsonObject getDatasetJson() {
+        if (jsonRepresentation == null) {
+            final JsonObjectBuilder datasetAsJsonBuilder = JsonPrinter.jsonAsDatasetDto(dv);
+            jsonRepresentation = datasetAsJsonBuilder.build();
+        }
+        return jsonRepresentation;
+    }
+
+    @Override
+    public JsonObject getDatasetSchemaDotOrg() {
+        if (schemaDotOrgRepresentation == null) {
+            String jsonLdAsString = dv.getJsonLd();
+            schemaDotOrgRepresentation = JsonUtil.getJsonObject(jsonLdAsString);
+        }
+        return schemaDotOrgRepresentation;
+    }
+
+    @Override
+    public JsonObject getDatasetORE() {
+        if (oreRepresentation == null) {
+            oreRepresentation = new OREMap(dv).getOREMap();
+        }
+        return oreRepresentation;
+    }
+
+    @Override
+    public String getDataCiteXml() {
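+        // Builds the DataCite metadata XML that the DataCiteExporter previously assembled itself.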
+        return DOIDataCiteRegisterService.getMetadataFromDvObject(
+                dv.getDataset().getGlobalId().asString(), new DataCitation(dv).getDataCiteMetadata(), dv.getDataset());
+    }
+    
+    @Override
+    public JsonArray getDatasetFileDetails() {
+        JsonArrayBuilder jab = Json.createArrayBuilder();
+        for (FileMetadata fileMetadata : dv.getFileMetadatas()) {
+            DataFile dataFile = fileMetadata.getDataFile();
+            jab.add(JsonPrinter.json(dataFile, fileMetadata, true));
+        }
+        return jab.build();
+    }
+    
+    @Override
+    public Optional<InputStream> getPrerequisiteInputStream() {
+        return Optional.ofNullable(is);
+    }
+
+    public void setPrerequisiteInputStream(InputStream prereqStream) {
+        this.is = prereqStream;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
index 0c87e02456d..a54e61c7c1e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/JSONExporter.java
@@ -2,12 +2,16 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
+import java.util.Locale;
+import java.util.Optional;
+
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
 
 
 /**
@@ -18,30 +22,26 @@
 public class JSONExporter implements Exporter {
 
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return "dataverse_json";
     }
 
     @Override
-    public String getDisplayName() {
-        return  BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.json") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.json") : "JSON";
+    public String getDisplayName(Locale locale) {
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.json", locale); 
+        return Optional.ofNullable(displayName).orElse("JSON");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try{
-            outputStream.write(json.toString().getBytes("UTF8"));
+            outputStream.write(dataProvider.getDatasetJson().toString().getBytes("UTF8"));
             outputStream.flush();
         } catch (Exception e){
             throw new ExportException("Unknown exception caught during JSON export.");
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return false;
-    }
-    
     @Override
     public Boolean isHarvestable() {
         return true;
@@ -51,27 +51,7 @@ public Boolean isHarvestable() {
     public Boolean isAvailableToUsers() {
         return true;
     }
-    
-    @Override
-    public String getXMLNameSpace() throws ExportException {
-        throw new ExportException ("JSONExporter: not an XML format.");   
-    }
-    
-    @Override
-    public String getXMLSchemaLocation() throws ExportException {
-        throw new ExportException ("JSONExporter: not an XML format."); 
-    }
-    
-    @Override
-    public String getXMLSchemaVersion() throws ExportException {
-        throw new ExportException ("JSONExporter: not an XML format."); 
-    }
-    
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter doesn't need/doesn't currently take any parameters
-    }
-    
+
     @Override
     public String getMediaType() {
         return MediaType.APPLICATION_JSON;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
index 30934cfc891..0b4121c6025 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_DDIExporter.java
@@ -2,12 +2,17 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.OutputStream;
-import javax.json.JsonObject;
+import java.util.Locale;
+import java.util.Optional;
+
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 /**
@@ -18,33 +23,30 @@
  * @author skraffmi
  */
 @AutoService(Exporter.class)
-public class OAI_DDIExporter implements Exporter {
+public class OAI_DDIExporter implements XMLExporter {
     
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         // TODO: Consider adding this "short form" to the "Export Metadata" dropdown in the GUI.
         return "oai_ddi";
     }
 
     @Override
-    public String getDisplayName() {
-        return  BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") != null ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi") : "DDI";
+    public String getDisplayName(Locale locale) {
+        // dataset.exportBtn.itemLabel.ddi is shared with the DDIExporter
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.ddi",locale);
+        return Optional.ofNullable(displayName).orElse("DDI");
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            DdiExportUtil.datasetJson2ddi(json, outputStream);
+            DdiExportUtil.datasetJson2ddi(dataProvider.getDatasetJson(), outputStream);
         } catch (XMLStreamException xse) {
             throw new ExportException ("Caught XMLStreamException performing DDI export");
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return true; 
-    }
-    
     @Override
     public Boolean isHarvestable() {
         return true;
@@ -56,22 +58,17 @@ public Boolean isAvailableToUsers() {
     }
     
     @Override
-    public String getXMLNameSpace() throws ExportException {
+    public String getXMLNameSpace() {
         return DDIExporter.DEFAULT_XML_NAMESPACE;
     }
     
     @Override
-    public String getXMLSchemaLocation() throws ExportException {
+    public String getXMLSchemaLocation() {
         return DDIExporter.DEFAULT_XML_SCHEMALOCATION;
     }
     
     @Override
-    public String getXMLSchemaVersion() throws ExportException {
+    public String getXMLSchemaVersion() {
         return DDIExporter.DEFAULT_XML_VERSION;
     }
-    
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter does not uses or supports any parameters as of now.
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
index 87adc1b4c5b..feec4403570 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OAI_OREExporter.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.export.ExportException;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.bagit.OREMap;
+
 import java.io.OutputStream;
-import java.util.ResourceBundle;
+import java.util.Locale;
+import java.util.Optional;
 import java.util.logging.Logger;
 
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
+import jakarta.json.JsonObject;
+import jakarta.ws.rs.core.MediaType;
 
 @AutoService(Exporter.class)
 public class OAI_OREExporter implements Exporter {
@@ -22,10 +22,11 @@ public class OAI_OREExporter implements Exporter {
     public static final String NAME = "OAI_ORE";
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream)
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream)
             throws ExportException {
         try {
-            new OREMap(version).writeOREMap(outputStream);
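+            // The ORE map is obtained pre-built from the data provider instead of being generated here.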
+            outputStream.write(dataProvider.getDatasetORE().toString().getBytes("UTF8"));
+            outputStream.flush();
         } catch (Exception e) {
             logger.severe(e.getMessage());
             e.printStackTrace();
@@ -34,20 +35,14 @@ public void exportDataset(DatasetVersion version, JsonObject json, OutputStream
 
 
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return NAME;
     }
 
     @Override
-    public String getDisplayName() {
-        return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.oai_ore") != null
-                ? BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.oai_ore")
-                : "OAI_ORE";
-    }
-
-    @Override
-    public Boolean isXMLFormat() {
-        return false;
+    public String getDisplayName(Locale locale) {
+        String displayName = BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.oai_ore", locale);
+        return Optional.ofNullable(displayName).orElse("OAI_ORE");
     }
 
     @Override
@@ -60,26 +55,6 @@ public Boolean isAvailableToUsers() {
         return true;
     }
 
-    @Override
-    public String getXMLNameSpace() throws ExportException {
-        throw new ExportException(OAI_OREExporter.class.getSimpleName() + ": not an XML format.");
-    }
-
-    @Override
-    public String getXMLSchemaLocation() throws ExportException {
-        throw new ExportException(OAI_OREExporter.class.getSimpleName() + ": not an XML format.");
-    }
-
-    @Override
-    public String getXMLSchemaVersion() throws ExportException {
-        throw new ExportException(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.");
-    }
-
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter doesn't need/doesn't currently take any parameters
-    }
-    
     @Override
     public String getMediaType() {
         return MediaType.APPLICATION_JSON;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java
index f4ce294f6e9..8bd4ae6a042 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/OpenAireExporter.java
@@ -1,48 +1,45 @@
 package edu.harvard.iq.dataverse.export;
 
 import java.io.OutputStream;
+import java.util.Locale;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamException;
 
 import com.google.auto.service.AutoService;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.export.openaire.OpenAireExportUtil;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 
 @AutoService(Exporter.class)
-public class OpenAireExporter implements Exporter {
+public class OpenAireExporter implements XMLExporter {
 
     public OpenAireExporter() {
     }
 
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return "oai_datacite";
     }
 
     @Override
-    public String getDisplayName() {
-        return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dataciteOpenAIRE");
+    public String getDisplayName(Locale locale) {
+        return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.dataciteOpenAIRE", locale);
     }
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream)
-            throws ExportException {
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
         try {
-            OpenAireExportUtil.datasetJson2openaire(json, outputStream);
+            OpenAireExportUtil.datasetJson2openaire(dataProvider.getDatasetJson(), outputStream);
         } catch (XMLStreamException xse) {
             throw new ExportException("Caught XMLStreamException performing DataCite OpenAIRE export", xse);
         }
     }
 
-    @Override
-    public Boolean isXMLFormat() {
-        return true;
-    }
-
     @Override
     public Boolean isHarvestable() {
         return true;
@@ -54,22 +51,17 @@ public Boolean isAvailableToUsers() {
     }
 
     @Override
-    public String getXMLNameSpace() throws ExportException {
+    public String getXMLNameSpace() {
         return OpenAireExportUtil.RESOURCE_NAMESPACE;
     }
 
     @Override
-    public String getXMLSchemaLocation() throws ExportException {
+    public String getXMLSchemaLocation() {
         return OpenAireExportUtil.RESOURCE_SCHEMA_LOCATION;
     }
 
     @Override
-    public String getXMLSchemaVersion() throws ExportException {
+    public String getXMLSchemaVersion() {
         return OpenAireExportUtil.SCHEMA_VERSION;
     }
-
-    @Override
-    public void setParam(String name, Object value) {
-        // not used
-    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
index 971f0e5afa5..5428715b905 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporter.java
@@ -1,17 +1,15 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.auto.service.AutoService;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.StringReader;
+import java.util.Locale;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MediaType;
 
 /**
  * Schema.org JSON-LD is used by Google Dataset Search and other services to
@@ -19,8 +17,8 @@
  * and available as an export format.
  * <p>
  * Do not make any backward incompatible changes unless it's absolutely
- * necessary and list them in the API Guide. The existing list is in the
- * "Native API" section.
+ * necessary and list them in the API Guide. The existing list is in the "Native
+ * API" section.
  * <p>
  * {@link SchemaDotOrgExporterTest} has most of the tests but
  * {@link DatasetVersionTest} has some as well. See
@@ -75,41 +73,33 @@ public class SchemaDotOrgExporter implements Exporter {
     public static final String NAME = "schema.org";
 
     @Override
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException {
-        String jsonLdAsString = version.getJsonLd();
-        try (JsonReader jsonReader = Json.createReader(new StringReader(jsonLdAsString));) {
-            JsonObject jsonLdJsonObject = jsonReader.readObject();
-            try {
-                outputStream.write(jsonLdJsonObject.toString().getBytes("UTF8"));
-            } catch (IOException ex) {
-                logger.info("IOException calling outputStream.write: " + ex);
-            }
-            try {
-                outputStream.flush();
-            } catch (IOException ex) {
-                logger.info("IOException calling outputStream.flush: " + ex);
-            }
+    public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) throws ExportException {
+        try {
+            outputStream.write(dataProvider.getDatasetSchemaDotOrg().toString().getBytes("UTF8"));
+        } catch (IOException ex) {
+            logger.info("IOException calling outputStream.write: " + ex);
+        }
+        try {
+            outputStream.flush();
+        } catch (IOException ex) {
+            logger.info("IOException calling outputStream.flush: " + ex);
         }
     }
 
     @Override
-    public String getProviderName() {
+    public String getFormatName() {
         return NAME;
     }
 
     @Override
-    public String getDisplayName() {
-        return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.schemaDotOrg");
-    }
-
-    @Override
-    public Boolean isXMLFormat() {
-        return false;
+    public String getDisplayName(Locale locale) {
+        return BundleUtil.getStringFromBundle("dataset.exportBtn.itemLabel.schemaDotOrg", locale);
     }
 
     @Override
     public Boolean isHarvestable() {
-        // Defer harvesting because the current effort was estimated as a "2": https://github.com/IQSS/dataverse/issues/3700
+        // Defer harvesting because the current effort was estimated as a "2":
+        // https://github.com/IQSS/dataverse/issues/3700
         return false;
     }
 
@@ -118,27 +108,6 @@ public Boolean isAvailableToUsers() {
         return true;
     }
 
-    @Override
-    public String getXMLNameSpace() throws ExportException {
-        throw new ExportException(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.");
-    }
-
-    @Override
-    public String getXMLSchemaLocation() throws ExportException {
-        throw new ExportException(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.");
-    }
-
-    @Override
-    public String getXMLSchemaVersion() throws ExportException {
-        throw new ExportException(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.");
-    }
-
-    @Override
-    public void setParam(String name, Object value) {
-        // this exporter doesn't need/doesn't currently take any parameters
-    }
-    
-
     @Override
     public String getMediaType() {
         return MediaType.APPLICATION_JSON;
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
index eb7632dd03c..9a689f7a4ed 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtil.java
@@ -3,26 +3,13 @@
 import com.google.gson.Gson;
 
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataTable;
 import edu.harvard.iq.dataverse.DatasetFieldConstant;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DvObjectContainer;
-import edu.harvard.iq.dataverse.FileMetadata;
-import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
 import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
 import edu.harvard.iq.dataverse.api.dto.FieldDTO;
 import edu.harvard.iq.dataverse.api.dto.FileDTO;
 import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
-import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
-import edu.harvard.iq.dataverse.datavariable.DataVariable;
-import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
-import edu.harvard.iq.dataverse.datavariable.VariableRange;
-import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
-import edu.harvard.iq.dataverse.datavariable.VariableCategory;
-import edu.harvard.iq.dataverse.datavariable.VarGroup;
-import edu.harvard.iq.dataverse.datavariable.CategoryMetadata;
 
 import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.LEVEL_FILE;
 import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_SUBJECT_TAG;
@@ -30,11 +17,10 @@
 import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_TAG;
 import static edu.harvard.iq.dataverse.export.DDIExportServiceBean.NOTE_TYPE_UNF;
 import edu.harvard.iq.dataverse.export.DDIExporter;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
 
-import edu.harvard.iq.dataverse.util.BundleUtil;
-import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
@@ -42,26 +28,29 @@
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.file.Files;
-import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.time.LocalDate;
 import java.util.*;
+import java.util.Map.Entry;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
 
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.FactoryConfigurationError;
 import javax.xml.parsers.ParserConfigurationException;
 import org.xml.sax.SAXException;
-import org.xml.sax.SAXParseException;
 import org.w3c.dom.Document;
 import org.apache.commons.lang3.StringUtils;
-import org.w3c.dom.DOMException;
 
 // For write operation
 import javax.xml.transform.Transformer;
@@ -71,9 +60,7 @@
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamSource;
 import javax.xml.transform.stream.StreamResult;
-import java.io.File;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 
 public class DdiExportUtil {
 
@@ -93,7 +80,6 @@ public class DdiExportUtil {
     public static final String CITATION_BLOCK_NAME = "citation";
 
     public static String datasetDtoAsJson2ddi(String datasetDtoAsJson) {
-        logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson));
         Gson gson = new Gson();
         DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson, DatasetDTO.class);
         try {
@@ -137,7 +123,7 @@ private static void dtoddi(DatasetDTO datasetDto, OutputStream outputStream) thr
 
     
     // "full" ddi, with the the "<fileDscr>"  and "<dataDscr>/<var>" sections: 
-    public static void datasetJson2ddi(JsonObject datasetDtoAsJson, DatasetVersion version, OutputStream outputStream) throws XMLStreamException {
+    public static void datasetJson2ddi(JsonObject datasetDtoAsJson, JsonArray fileDetails, OutputStream outputStream) throws XMLStreamException {
         logger.fine(JsonUtil.prettyPrint(datasetDtoAsJson.toString()));
         Gson gson = new Gson();
         DatasetDTO datasetDto = gson.fromJson(datasetDtoAsJson.toString(), DatasetDTO.class);
@@ -152,9 +138,9 @@ public static void datasetJson2ddi(JsonObject datasetDtoAsJson, DatasetVersion v
             writeAttribute(xmlw, "xml:lang", datasetDto.getMetadataLanguage());
         }
         createStdyDscr(xmlw, datasetDto);
-        createFileDscr(xmlw, version);
-        createDataDscr(xmlw, version);
-        createOtherMatsFromFileMetadatas(xmlw, version.getFileMetadatas());
+        createFileDscr(xmlw, fileDetails);
+        createDataDscr(xmlw, fileDetails);
+        createOtherMatsFromFileMetadatas(xmlw, fileDetails);
         xmlw.writeEndElement(); // codeBook
         xmlw.flush();
     }
@@ -181,7 +167,7 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto)
         String pidUri = pid;
         //Some tests don't send real PIDs - don't try to get their URL form
         if(!pidUri.equals("null:null/null")) {
-            pidUri= new GlobalId(persistentProtocol + ":" + persistentAuthority + "/" + persistentId).toURL().toString();
+            pidUri= PidUtil.parseAsGlobalID(persistentProtocol, persistentAuthority, persistentId).asURL();
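+            // Illustrative only: for a DOI with authority "10.1234" and identifier
+            // "ABC123", asURL() would typically yield "https://doi.org/10.1234/ABC123".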
         }
         // The "persistentAgency" tag is used for the "agency" attribute of the 
         // <IDNo> ddi section; back in the DVN3 days we used "handle" and "DOI" 
@@ -202,7 +188,10 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto)
        
         writeFullElement(xmlw, "titl", dto2Primitive(version, DatasetFieldConstant.title), datasetDto.getMetadataLanguage());
         writeFullElement(xmlw, "subTitl", dto2Primitive(version, DatasetFieldConstant.subTitle));
-        writeFullElement(xmlw, "altTitl", dto2Primitive(version, DatasetFieldConstant.alternativeTitle));
+        FieldDTO altField = dto2FieldDTO(version, DatasetFieldConstant.alternativeTitle, "citation");
+        if (altField != null) {
+            writeMultipleElement(xmlw, "altTitl", altField, datasetDto.getMetadataLanguage());
+        }
         
         xmlw.writeStartElement("IDNo");
         writeAttribute(xmlw, "agency", persistentAgency);
@@ -235,9 +224,11 @@ private static void createStdyDscr(XMLStreamWriter xmlw, DatasetDTO datasetDto)
         }
         writeDistributorsElement(xmlw, version, datasetDto.getMetadataLanguage());
         writeContactsElement(xmlw, version);
-        writeFullElement(xmlw, "distDate", dto2Primitive(version, DatasetFieldConstant.distributionDate));
+        /* per SCHEMA, depositr comes before depDate! - L.A. */
         writeFullElement(xmlw, "depositr", dto2Primitive(version, DatasetFieldConstant.depositor));
+        /* ... and depDate comes before distDate - L.A. */
         writeFullElement(xmlw, "depDate", dto2Primitive(version, DatasetFieldConstant.dateOfDeposit));
+        writeFullElement(xmlw, "distDate", dto2Primitive(version, DatasetFieldConstant.distributionDate));
 
         xmlw.writeEndElement(); // diststmt
 
@@ -290,23 +281,16 @@ private static void writeOtherStudyMaterial(XMLStreamWriter xmlw , DatasetVersio
         xmlw.writeEndElement(); //othrStdyMat
     }
 
+    /*
+            <xs:sequence>
+               <xs:element ref="setAvail" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="useStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+    */
     private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO version) throws XMLStreamException {
         xmlw.writeStartElement("dataAccs");
-        if (version.getTermsOfUse() != null && !version.getTermsOfUse().trim().equals("")) {
-            xmlw.writeStartElement("notes");
-            writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_USE);
-            writeAttribute(xmlw, "level", LEVEL_DV);
-            xmlw.writeCharacters(version.getTermsOfUse());
-            xmlw.writeEndElement(); //notes
-        }
-        if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) {
-            xmlw.writeStartElement("notes");
-            writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_ACCESS);
-            writeAttribute(xmlw, "level", LEVEL_DV);
-            xmlw.writeCharacters(version.getTermsOfAccess());
-            xmlw.writeEndElement(); //notes
-        }
-
+        
         xmlw.writeStartElement("setAvail");
         writeFullElement(xmlw, "accsPlac", version.getDataAccessPlace());
         writeFullElement(xmlw, "origArch", version.getOriginalArchive());
@@ -314,6 +298,7 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver
         writeFullElement(xmlw, "collSize", version.getSizeOfCollection());
         writeFullElement(xmlw, "complete", version.getStudyCompletion());
         xmlw.writeEndElement(); //setAvail
+        
         xmlw.writeStartElement("useStmt");
         writeFullElement(xmlw, "confDec", version.getConfidentialityDeclaration());
         writeFullElement(xmlw, "specPerm", version.getSpecialPermissions());
@@ -324,6 +309,15 @@ private static void writeDataAccess(XMLStreamWriter xmlw , DatasetVersionDTO ver
         writeFullElement(xmlw, "conditions", version.getConditions());
         writeFullElement(xmlw, "disclaimer", version.getDisclaimer());
         xmlw.writeEndElement(); //useStmt
+        
+        /* any <note>s: */
+        if (version.getTermsOfAccess() != null && !version.getTermsOfAccess().trim().equals("")) {
+            xmlw.writeStartElement("notes");
+            writeAttribute(xmlw, "type", NOTE_TYPE_TERMS_OF_ACCESS);
+            writeAttribute(xmlw, "level", LEVEL_DV);
+            xmlw.writeCharacters(version.getTermsOfAccess());
+            xmlw.writeEndElement(); //notes
+        }
         xmlw.writeEndElement(); //dataAccs
     }
     
@@ -384,141 +378,222 @@ private static void writeVersionStatement(XMLStreamWriter xmlw, DatasetVersionDT
         xmlw.writeEndElement(); // verStmt
     }
     
+    /* From the DDI 2.5 schema: 
+            <xs:sequence>
+               <xs:element ref="timePrd" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="collDate" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="nation" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="geogCover" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="geogUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="geoBndBox" minOccurs="0"/>
+               <xs:element ref="boundPoly" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="anlyUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataKind" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+    */
     private static void writeSummaryDescriptionElement(XMLStreamWriter xmlw, DatasetVersionDTO datasetVersionDTO, String lang) throws XMLStreamException {
         xmlw.writeStartElement("sumDscr");
+        FieldDTO timePeriodCoveredDTO = null;
+        FieldDTO dateOfCollectionDTO = null;
+        FieldDTO geographicCoverageDTO = null;
+        FieldDTO geographicBoundingBoxDTO = null;
+        FieldDTO unitOfAnalysisDTO = null;
+        FieldDTO universeDTO = null;
+        FieldDTO kindOfDataDTO = null;
+
         for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
             String key = entry.getKey();
             MetadataBlockDTO value = entry.getValue();
+
             if ("citation".equals(key)) {
-                Integer per = 0;
-                Integer coll = 0;
                 for (FieldDTO fieldDTO : value.getFields()) {
                     if (DatasetFieldConstant.timePeriodCovered.equals(fieldDTO.getTypeName())) {
-                        String dateValStart = "";
-                        String dateValEnd = "";
-                        for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
-                            per++;
-                            for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
-                                FieldDTO next = iterator.next();
-                                if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) {
-                                    dateValStart = next.getSinglePrimitive();
-                                }
-                                if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) {
-                                    dateValEnd = next.getSinglePrimitive();
-                                }
-                            }
-                            if (!dateValStart.isEmpty()) {
-                                writeDateElement(xmlw, "timePrd", "P"+ per.toString(), "start", dateValStart );
-                            }
-                            if (!dateValEnd.isEmpty()) {
-                                writeDateElement(xmlw, "timePrd",  "P"+ per.toString(), "end", dateValEnd );
-                            }
-                        }
+                        timePeriodCoveredDTO = fieldDTO;
                     }
+
                     if (DatasetFieldConstant.dateOfCollection.equals(fieldDTO.getTypeName())) {
-                        String dateValStart = "";
-                        String dateValEnd = "";
-                        for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
-                            coll++;
-                            for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
-                                FieldDTO next = iterator.next();
-                                if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) {
-                                    dateValStart = next.getSinglePrimitive();
-                                }
-                                if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) {
-                                    dateValEnd = next.getSinglePrimitive();
-                                }
-                            }
-                            if (!dateValStart.isEmpty()) {
-                                writeDateElement(xmlw, "collDate",  "P"+ coll.toString(), "start", dateValStart );
-                            }
-                            if (!dateValEnd.isEmpty()) {
-                                writeDateElement(xmlw,  "collDate",  "P"+ coll.toString(), "end", dateValEnd );
-                            }
-                        }
+                        dateOfCollectionDTO = fieldDTO;
                     }
+
                     if (DatasetFieldConstant.kindOfData.equals(fieldDTO.getTypeName())) {
-                        writeMultipleElement(xmlw, "dataKind", fieldDTO, lang);
+                        kindOfDataDTO = fieldDTO;
                     }
                 }
             }
-            
-            if("geospatial".equals(key)){                
+
+            if ("geospatial".equals(key)) {
                 for (FieldDTO fieldDTO : value.getFields()) {
                     if (DatasetFieldConstant.geographicCoverage.equals(fieldDTO.getTypeName())) {
-
-                        for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
-                            HashMap<String, String> geoMap = new HashMap<>();
-                            for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
-                                FieldDTO next = iterator.next();
-                                if (DatasetFieldConstant.country.equals(next.getTypeName())) {
-                                    geoMap.put("country", next.getSinglePrimitive());
-                                }
-                                if (DatasetFieldConstant.city.equals(next.getTypeName())) {
-                                    geoMap.put("city", next.getSinglePrimitive());
-                                }
-                                if (DatasetFieldConstant.state.equals(next.getTypeName())) {
-                                    geoMap.put("state", next.getSinglePrimitive());
-                                } 
-                                if (DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) {
-                                    geoMap.put("otherGeographicCoverage", next.getSinglePrimitive());
-                                } 
-                            }
-
-                            if (geoMap.get("country") != null) {
-                                writeFullElement(xmlw, "nation", geoMap.get("country"));
-                            }
-                            if (geoMap.get("city") != null) {
-                                writeFullElement(xmlw, "geogCover", geoMap.get("city"));
-                            }
-                            if (geoMap.get("state") != null) {
-                                writeFullElement(xmlw, "geogCover", geoMap.get("state"));
-                            }
-                            if (geoMap.get("otherGeographicCoverage") != null) {
-                                writeFullElement(xmlw, "geogCover", geoMap.get("otherGeographicCoverage"));
-                            }
-
-                        }
+                        geographicCoverageDTO = fieldDTO;
                     }
                     if (DatasetFieldConstant.geographicBoundingBox.equals(fieldDTO.getTypeName())) {
 
-                        for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
-                            xmlw.writeStartElement("geoBndBox");
-                            for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
-                                FieldDTO next = iterator.next();
-                                if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) {
-                                    writeFullElement(xmlw, "westBL", next.getSinglePrimitive());
-                                }
-                                if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) {
-                                    writeFullElement(xmlw, "eastBL", next.getSinglePrimitive());
-                                }
-                                if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) {
-                                    writeFullElement(xmlw, "northBL", next.getSinglePrimitive());
-                                }  
-                                if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) {
-                                    writeFullElement(xmlw, "southBL", next.getSinglePrimitive());
-                                }
-
-                            }
-                            xmlw.writeEndElement();
-                        }
+                        geographicBoundingBoxDTO = fieldDTO;
 
                     }
                 }
-                    writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit));
             }
 
-            if("socialscience".equals(key)){                
+            if ("socialscience".equals(key)) {
                 for (FieldDTO fieldDTO : value.getFields()) {
                     if (DatasetFieldConstant.universe.equals(fieldDTO.getTypeName())) {
-                        writeMultipleElement(xmlw, "universe", fieldDTO, lang);
+                        universeDTO = fieldDTO;
                     }
                     if (DatasetFieldConstant.unitOfAnalysis.equals(fieldDTO.getTypeName())) {
-                        writeI18NElementList(xmlw, "anlyUnit", fieldDTO.getMultipleVocab(), "unitOfAnalysis", fieldDTO.getTypeClass(), "socialscience", lang);
+                        unitOfAnalysisDTO = fieldDTO;
+                    }
+                }
+            }
+        }
+        /* Finally, we can write the fields we have collected, in the correct order: -L.A.*/
+
+        if (timePeriodCoveredDTO != null) {
+            String dateValStart = "";
+            String dateValEnd = "";
+            Integer per = 0;
+            for (HashSet<FieldDTO> foo : timePeriodCoveredDTO.getMultipleCompound()) {
+                per++;
+                for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+                    FieldDTO next = iterator.next();
+                    if (DatasetFieldConstant.timePeriodCoveredStart.equals(next.getTypeName())) {
+                        dateValStart = next.getSinglePrimitive();
                     }
+                    if (DatasetFieldConstant.timePeriodCoveredEnd.equals(next.getTypeName())) {
+                        dateValEnd = next.getSinglePrimitive();
+                    }
+                }
+                if (!dateValStart.isEmpty()) {
+                    writeDateElement(xmlw, "timePrd", "P" + per.toString(), "start", dateValStart);
+                }
+                if (!dateValEnd.isEmpty()) {
+                    writeDateElement(xmlw, "timePrd", "P" + per.toString(), "end", dateValEnd);
+                }
+            }
+        }
+
+        if (dateOfCollectionDTO != null) {
+            String dateValStart = "";
+            String dateValEnd = "";
+            Integer coll = 0;
+            for (HashSet<FieldDTO> foo : dateOfCollectionDTO.getMultipleCompound()) {
+                coll++;
+                for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+                    FieldDTO next = iterator.next();
+                    if (DatasetFieldConstant.dateOfCollectionStart.equals(next.getTypeName())) {
+                        dateValStart = next.getSinglePrimitive();
+                    }
+                    if (DatasetFieldConstant.dateOfCollectionEnd.equals(next.getTypeName())) {
+                        dateValEnd = next.getSinglePrimitive();
+                    }
+                }
+                if (!dateValStart.isEmpty()) {
+                    writeDateElement(xmlw, "collDate", "P" + coll.toString(), "start", dateValStart);
+                }
+                if (!dateValEnd.isEmpty()) {
+                    writeDateElement(xmlw, "collDate", "P" + coll.toString(), "end", dateValEnd);
+                }
+            }
+        }
+
+        /* <nation> and <geogCover> come next, in that order. -L.A. */
+        if (geographicCoverageDTO != null) {
+
+            List<String> nationList = new ArrayList<>();
+            List<String> geogCoverList = new ArrayList<>();
+
+            for (HashSet<FieldDTO> foo : geographicCoverageDTO.getMultipleCompound()) {
+                for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
+                    FieldDTO next = iterator.next();
+                    /* our "country" field maps 1:1 to the DDI "<nation>": */
+                    if (DatasetFieldConstant.country.equals(next.getTypeName())) {
+                        nationList.add(next.getSinglePrimitive());
+                    }
+                    /* city, state and otherGeographicCoverage all exported as "<geogCover>": */
+                    if (DatasetFieldConstant.city.equals(next.getTypeName())
+                            || DatasetFieldConstant.state.equals(next.getTypeName())
+                            || DatasetFieldConstant.otherGeographicCoverage.equals(next.getTypeName())) {
+                        geogCoverList.add(next.getSinglePrimitive());
+                    }
+                }
+            }
+
+            /**
+             * And now we can write all the fields encountered, first the
+             * "<nation>" entries, then all the "<geogCover>" ones:
+             */
+            for (String nationEntry : nationList) {
+                writeFullElement(xmlw, "nation", nationEntry);
+            }
+            for (String geogCoverEntry : geogCoverList) {
+                writeFullElement(xmlw, "geogCover", geogCoverEntry);
+            }
+        }
+
+        writeFullElementList(xmlw, "geogUnit", dto2PrimitiveList(datasetVersionDTO, DatasetFieldConstant.geographicUnit));
+
+        /* Only 1 geoBndBox is allowed in the DDI.
+           So, I'm just going to arbitrarily use the first one, and ignore the rest! -L.A. */
+        if (geographicBoundingBoxDTO != null) {
+            HashSet<FieldDTO> bndBoxSet = geographicBoundingBoxDTO.getMultipleCompound().get(0);
+            xmlw.writeStartElement("geoBndBox");
+            HashMap<String, String> geoBndBoxMap = new HashMap<>();
+            for (FieldDTO next : bndBoxSet) {
+                if (DatasetFieldConstant.westLongitude.equals(next.getTypeName())) {
+                    geoBndBoxMap.put("westBL", next.getSinglePrimitive());
+                }
+                if (DatasetFieldConstant.eastLongitude.equals(next.getTypeName())) {
+                    geoBndBoxMap.put("eastBL", next.getSinglePrimitive());
+                }
+                if (DatasetFieldConstant.northLatitude.equals(next.getTypeName())) {
+                    geoBndBoxMap.put("northBL", next.getSinglePrimitive());
+                }
+                if (DatasetFieldConstant.southLatitude.equals(next.getTypeName())) {
+                    geoBndBoxMap.put("southBL", next.getSinglePrimitive());
                 }
             }
+
+            /* Once again, order is important! */
+            /*
+                        <xs:sequence>
+                            <xs:element ref="westBL"/>
+                            <xs:element ref="eastBL"/>
+                            <xs:element ref="southBL"/>
+                            <xs:element ref="northBL"/>
+                        </xs:sequence>
+             */
+            if (geoBndBoxMap.get("westBL") != null) {
+                writeFullElement(xmlw, "westBL", geoBndBoxMap.get("westBL"));
+            }
+            if (geoBndBoxMap.get("eastBL") != null) {
+                writeFullElement(xmlw, "eastBL", geoBndBoxMap.get("eastBL"));
+            }
+            if (geoBndBoxMap.get("southBL") != null) {
+                writeFullElement(xmlw, "southBL", geoBndBoxMap.get("southBL"));
+            }
+            if (geoBndBoxMap.get("northBL") != null) {
+                writeFullElement(xmlw, "northBL", geoBndBoxMap.get("northBL"));
+            }
+
+            xmlw.writeEndElement();
+        }
+
+        /* anlyUnit: */
+        if (unitOfAnalysisDTO != null) {
+            writeI18NElementList(xmlw, "anlyUnit", unitOfAnalysisDTO.getMultipleVocab(), "unitOfAnalysis", unitOfAnalysisDTO.getTypeClass(), "socialscience", lang);
+
+        }
+
+        /* universe: */
+        if (universeDTO != null) {
+            writeMultipleElement(xmlw, "universe", universeDTO, lang);
+        }
+
+        /* finally, any "kind of data" entries: */
+        if (kindOfDataDTO != null) {
+            writeMultipleElement(xmlw, "dataKind", kindOfDataDTO, lang);
         }
+
         xmlw.writeEndElement(); //sumDscr     
     }
     
@@ -540,6 +615,29 @@ private static void writeDateElement(XMLStreamWriter xmlw, String element, Strin
 
     }
     
+    /**
+     * Again, <dataColl> is an xs:sequence - order is important and must follow
+     * the schema. -L.A.
+     * <xs:sequence>
+     * <xs:element ref="timeMeth" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="dataCollector" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="collectorTraining" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="frequenc" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="sampProc" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="sampleFrame" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="targetSampleSize" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="deviat" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="collMode" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="resInstru" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="instrumentDevelopment" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="sources" minOccurs="0"/>
+     * <xs:element ref="collSitu" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="actMin" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="ConOps" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="weight" minOccurs="0" maxOccurs="unbounded"/>
+     * <xs:element ref="cleanOps" minOccurs="0" maxOccurs="unbounded"/>
+     * </xs:sequence>
+     */
     private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO version, String lang) throws XMLStreamException{
         xmlw.writeStartElement("method");
         xmlw.writeStartElement("dataColl");
@@ -553,13 +651,7 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO
 
         writeI18NElement(xmlw, "deviat", version, DatasetFieldConstant.deviationsFromSampleDesign, lang);
 
-        xmlw.writeStartElement("sources");
-        writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources));
-        writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang);
-        writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang);
-        writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang);
-        xmlw.writeEndElement(); //sources
-
+        /* <collMode> comes before <sources>: */
         FieldDTO collModeFieldDTO = dto2FieldDTO(version, DatasetFieldConstant.collectionMode, "socialscience");
         if (collModeFieldDTO != null) {
             // This field was made multiple as of 5.10
@@ -571,21 +663,33 @@ private static void writeMethodElement(XMLStreamWriter xmlw , DatasetVersionDTO
                 writeI18NElement(xmlw, "collMode", version, DatasetFieldConstant.collectionMode, lang);
             }
         }
+        /* and so does <resInstru>: */
         writeI18NElement(xmlw, "resInstru", version, DatasetFieldConstant.researchInstrument, lang); 
+        xmlw.writeStartElement("sources");
+        writeFullElementList(xmlw, "dataSrc", dto2PrimitiveList(version, DatasetFieldConstant.dataSources));
+        writeI18NElement(xmlw, "srcOrig", version, DatasetFieldConstant.originOfSources, lang);
+        writeI18NElement(xmlw, "srcChar", version, DatasetFieldConstant.characteristicOfSources, lang);
+        writeI18NElement(xmlw, "srcDocu", version, DatasetFieldConstant.accessToSources, lang);
+        xmlw.writeEndElement(); //sources
+
+        
         writeI18NElement(xmlw, "collSitu", version, DatasetFieldConstant.dataCollectionSituation, lang);
         writeI18NElement(xmlw, "actMin", version, DatasetFieldConstant.actionsToMinimizeLoss, lang);
-        writeI18NElement(xmlw, "conOps", version, DatasetFieldConstant.controlOperations, lang);
+        /* "<ConOps>" has the uppercase C: */
+        writeI18NElement(xmlw, "ConOps", version, DatasetFieldConstant.controlOperations, lang);
         writeI18NElement(xmlw, "weight", version, DatasetFieldConstant.weighting, lang);  
         writeI18NElement(xmlw, "cleanOps", version, DatasetFieldConstant.cleaningOperations, lang);
 
         xmlw.writeEndElement(); //dataColl
+        /* <notes> before <anlyInfo>: */
+        writeNotesElement(xmlw, version);
+
         xmlw.writeStartElement("anlyInfo");
         //writeFullElement(xmlw, "anylInfo", dto2Primitive(version, DatasetFieldConstant.datasetLevelErrorNotes));
         writeI18NElement(xmlw, "respRate", version, DatasetFieldConstant.responseRate, lang);
         writeI18NElement(xmlw, "EstSmpErr", version, DatasetFieldConstant.samplingErrorEstimates, lang);
         writeI18NElement(xmlw, "dataAppr", version, DatasetFieldConstant.otherDataAppraisal, lang); 
         xmlw.writeEndElement(); //anlyInfo
-        writeNotesElement(xmlw, version);
         
         xmlw.writeEndElement();//method
     }
@@ -848,7 +952,6 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT
                             String producerAffiliation = "";
                             String producerAbbreviation = "";
                             String producerLogo = "";
-                            String producerURL = "";
                             for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
                                 FieldDTO next = iterator.next();
                                 if (DatasetFieldConstant.producerName.equals(next.getTypeName())) {
@@ -863,10 +966,6 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT
                                 if (DatasetFieldConstant.producerLogo.equals(next.getTypeName())) {
                                     producerLogo = next.getSinglePrimitive();
                                 }
-                                if (DatasetFieldConstant.producerURL.equals(next.getTypeName())) {
-                                    producerURL = next.getSinglePrimitive();
-
-                                }
                             }
                             if (!producerName.isEmpty()) {
                                 xmlw.writeStartElement("producer");
@@ -876,12 +975,9 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT
                                 if (!producerAbbreviation.isEmpty()) {
                                     writeAttribute(xmlw, "abbr", producerAbbreviation);
                                 }
-                                if (!producerLogo.isEmpty()) {
+                                /*if (!producerLogo.isEmpty()) {
                                     writeAttribute(xmlw, "role", producerLogo);
-                                }
-                                if (!producerURL.isEmpty()) {
-                                    writeAttribute(xmlw, "URI", producerURL);
-                                }
+                                }*/
                                 xmlw.writeCharacters(producerName);
                                 xmlw.writeEndElement(); //AuthEnty
                             }
@@ -891,8 +987,15 @@ private static void writeProducersElement(XMLStreamWriter xmlw, DatasetVersionDT
                 }
             }
         }
-        writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate));    
-        writeFullElement(xmlw, "prodPlac", dto2Primitive(version, DatasetFieldConstant.productionPlace));
+        writeFullElement(xmlw, "prodDate", dto2Primitive(version, DatasetFieldConstant.productionDate));
+        // productionPlace was made multiple as of 5.14:
+        // (a quick backward compatibility check was added to dto2PrimitiveList(),
+        // see the method for details)
+
+        FieldDTO prodPlac = dto2FieldDTO(version, DatasetFieldConstant.productionPlace, "citation");
+        if (prodPlac != null) {
+            writeMultipleElement(xmlw, "prodPlac", prodPlac, null);
+        }
         writeSoftwareElement(xmlw, version);
   
         writeGrantElement(xmlw, version);
@@ -912,7 +1015,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio
                             String distributorAffiliation = "";
                             String distributorAbbreviation = "";
                             String distributorURL = "";
-                            String distributorLogoURL = "";
                             for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
                                 FieldDTO next = iterator.next();
                                 if (DatasetFieldConstant.distributorName.equals(next.getTypeName())) {
@@ -927,9 +1029,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio
                                 if (DatasetFieldConstant.distributorURL.equals(next.getTypeName())) {
                                     distributorURL = next.getSinglePrimitive();
                                 }
-                                if (DatasetFieldConstant.distributorLogo.equals(next.getTypeName())) {
-                                    distributorLogoURL = next.getSinglePrimitive();
-                                }
                             }
                             if (!distributorName.isEmpty()) {
                                 xmlw.writeStartElement("distrbtr");
@@ -945,9 +1044,6 @@ private static void writeDistributorsElement(XMLStreamWriter xmlw, DatasetVersio
                                 if (!distributorURL.isEmpty()) {
                                     writeAttribute(xmlw, "URI", distributorURL);
                                 }
-                                if (!distributorLogoURL.isEmpty()) {
-                                    writeAttribute(xmlw, "role", distributorLogoURL);
-                                }
                                 xmlw.writeCharacters(distributorName);
                                 xmlw.writeEndElement(); //AuthEnty
                             }
@@ -991,16 +1087,33 @@ private static void writeRelPublElement(XMLStreamWriter xmlw, DatasetVersionDTO
                             if (citation != null && !citation.trim().equals("")) {
                                 xmlw.writeStartElement("relPubl");
                                 xmlw.writeStartElement("citation");
+                                /* <xs:sequence>
+                                    <xs:element ref="titlStmt"/>
+                                    <xs:element ref="rspStmt" minOccurs="0"/>
+                                    <xs:element ref="prodStmt" minOccurs="0"/>
+                                    <xs:element ref="distStmt" minOccurs="0"/>
+                                    <xs:element ref="serStmt" minOccurs="0" maxOccurs="unbounded"/>
+                                    <xs:element ref="verStmt" minOccurs="0" maxOccurs="unbounded"/>
+                                    <xs:element ref="biblCit" minOccurs="0" maxOccurs="unbounded"/>
+                                    <xs:element ref="holdings" minOccurs="0" maxOccurs="unbounded"/>
+                                    <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+                                    <xs:group ref="dc:elementsAndRefinementsGroup"/>
+                                   </xs:sequence>
+                                 (In other words - titlStmt is mandatory! -L.A.)
+                                */
+                                xmlw.writeStartElement("titlStmt");
+                                writeFullElement(xmlw, "titl", citation);
                                 if (IDNo != null && !IDNo.trim().equals("")) {
-                                    xmlw.writeStartElement("titlStmt");
+
                                     xmlw.writeStartElement("IDNo");
                                     if (IDType != null && !IDType.trim().equals("")) {
-                                        xmlw.writeAttribute("agency", IDType );
+                                        xmlw.writeAttribute("agency", IDType);
                                     }
                                     xmlw.writeCharacters(IDNo);
                                     xmlw.writeEndElement(); //IDNo
-                                    xmlw.writeEndElement(); // titlStmt
                                 }
+                                xmlw.writeEndElement(); // titlStmt
+
 
                                 writeFullElement(xmlw,"biblCit",citation);
                                 xmlw.writeEndElement(); //citation
@@ -1172,33 +1285,34 @@ private static void writeSeriesElement(XMLStreamWriter xmlw, DatasetVersionDTO d
         for (Map.Entry<String, MetadataBlockDTO> entry : datasetVersionDTO.getMetadataBlocks().entrySet()) {
             String key = entry.getKey();
             MetadataBlockDTO value = entry.getValue();
-            if ("citation".equals(key)) {
+            if ("citation".equals(key)) {               
                 for (FieldDTO fieldDTO : value.getFields()) {
                     if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) {
-                        xmlw.writeStartElement("serStmt");                        
                         String seriesName = "";
                         String seriesInformation = "";
-                        Set<FieldDTO> foo = fieldDTO.getSingleCompound();
+                        for (HashSet<FieldDTO> foo : fieldDTO.getMultipleCompound()) {
+                            xmlw.writeStartElement("serStmt");
                             for (Iterator<FieldDTO> iterator = foo.iterator(); iterator.hasNext();) {
                                 FieldDTO next = iterator.next();
                                 if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) {
-                                    seriesName =  next.getSinglePrimitive();
+                                    seriesName = next.getSinglePrimitive();
                                 }
                                 if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) {
-                                    seriesInformation =  next.getSinglePrimitive();
+                                    seriesInformation = next.getSinglePrimitive();
                                 }
                             }
-                            if (!seriesName.isEmpty()){
-                                xmlw.writeStartElement("serName"); 
+                            if (!seriesName.isEmpty()) {
+                                xmlw.writeStartElement("serName");
                                 xmlw.writeCharacters(seriesName);
-                                xmlw.writeEndElement(); //grantno
+                                xmlw.writeEndElement(); //serName
                             }
-                            if (!seriesInformation.isEmpty()){
-                                xmlw.writeStartElement("serInfo"); 
+                            if (!seriesInformation.isEmpty()) {
+                                xmlw.writeStartElement("serInfo");
                                 xmlw.writeCharacters(seriesInformation);
-                                xmlw.writeEndElement(); //grantno
+                                xmlw.writeEndElement(); //serInfo
                             }
-                        xmlw.writeEndElement(); //serStmt
+                            xmlw.writeEndElement(); //serStmt
+                        }
                     }
                 }
             }
@@ -1225,17 +1339,18 @@ private static void writeTargetSampleElement(XMLStreamWriter xmlw, DatasetVersio
                                 actualSize = next.getSinglePrimitive();
                             }
                         }
-
-                        if (!sizeFormula.isEmpty()) {
-                            xmlw.writeStartElement("sampleSizeFormula");
-                            xmlw.writeCharacters(sizeFormula);
-                            xmlw.writeEndElement(); //sampleSizeFormula
-                        }
+                        /* <sampleSize> must come before <sampleSizeFormula>! -L.A. */
                         if (!actualSize.isEmpty()) {
                             xmlw.writeStartElement("sampleSize");
                             xmlw.writeCharacters(actualSize);
                             xmlw.writeEndElement(); //sampleSize
                         }
+                        if (!sizeFormula.isEmpty()) {
+                            xmlw.writeStartElement("sampleSizeFormula");
+                            xmlw.writeCharacters(sizeFormula);
+                            xmlw.writeEndElement(); //sampleSizeFormula
+                        }
+                        
                         xmlw.writeEndElement(); // targetSampleSize
                     }
                 }
@@ -1334,46 +1449,43 @@ private static void createOtherMats(XMLStreamWriter xmlw, List<FileDTO> fileDtos
     // otherMat, or a fileDscr section. 
     // -- L.A. 4.5 
     
-    private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, List<FileMetadata> fileMetadatas) throws XMLStreamException {
+    private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, JsonArray fileDetails) throws XMLStreamException {
         // The preferred URL for this dataverse, for cooking up the file access API links:
         String dataverseUrl = SystemConfig.getDataverseSiteUrlStatic();
         
-        for (FileMetadata fileMetadata : fileMetadatas) {
+        for (int i=0;i<fileDetails.size();i++) {
+            JsonObject fileJson = fileDetails.getJsonObject(i);
             // We'll continue using the scheme we've used before, in DVN2-3: non-tabular files are put into otherMat,
             // tabular ones - in fileDscr sections. (fileDscr sections have special fields for numbers of variables
             // and observations, etc.)
-            if (fileMetadata.getDataFile() != null && !fileMetadata.getDataFile().isTabularData()) {
+            if (!fileJson.containsKey("dataTables")) {
                 xmlw.writeStartElement("otherMat");
-                writeAttribute(xmlw, "ID", "f" + fileMetadata.getDataFile().getId());
-                String dfIdentifier = fileMetadata.getDataFile().getIdentifier();
-                if (dfIdentifier != null && !dfIdentifier.isEmpty()){
-                    GlobalId globalId = new GlobalId(fileMetadata.getDataFile());
-                    writeAttribute(xmlw, "URI",  globalId.toURL().toString()); 
+                writeAttribute(xmlw, "ID", "f" + fileJson.getJsonNumber(("id").toString()));
+                if (fileJson.containsKey("pidUrl")){
+                    writeAttribute(xmlw, "URI",  fileJson.getString("pidUrl")); 
                 }  else {
-                    writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileMetadata.getDataFile().getId()); 
+                    writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileJson.getJsonNumber("id").toString()); 
                 }
 
                 writeAttribute(xmlw, "level", "datafile");
                 xmlw.writeStartElement("labl");
-                xmlw.writeCharacters(fileMetadata.getLabel());
+                xmlw.writeCharacters(fileJson.getString("filename"));
                 xmlw.writeEndElement(); // labl
                 
-                String description = fileMetadata.getDescription();
-                if (description != null) {
+                if (fileJson.containsKey("description")) {
                     xmlw.writeStartElement("txt");
-                    xmlw.writeCharacters(description);
+                    xmlw.writeCharacters(fileJson.getString("description"));
                     xmlw.writeEndElement(); // txt
                 }
                 // there's no readily available field in the othermat section 
                 // for the content type (aka mime type); so we'll store it in this
                 // specially formatted notes section:
-                String contentType = fileMetadata.getDataFile().getContentType();
-                if (!StringUtilisEmpty(contentType)) {
+                if (fileJson.containsKey("contentType")) {
                     xmlw.writeStartElement("notes");
                     writeAttribute(xmlw, "level", LEVEL_FILE);
                     writeAttribute(xmlw, "type", NOTE_TYPE_CONTENTTYPE);
                     writeAttribute(xmlw, "subject", NOTE_SUBJECT_CONTENTTYPE);
-                    xmlw.writeCharacters(contentType);
+                    xmlw.writeCharacters(fileJson.getString("contentType"));
                     xmlw.writeEndElement(); // notes
                 }
                 xmlw.writeEndElement(); // otherMat
@@ -1423,7 +1535,15 @@ private static List<String> dto2PrimitiveList(DatasetVersionDTO datasetVersionDT
             MetadataBlockDTO value = entry.getValue();
             for (FieldDTO fieldDTO : value.getFields()) {
                 if (datasetFieldTypeName.equals(fieldDTO.getTypeName())) {
-                    return fieldDTO.getMultiplePrimitive();
+                    // This hack is here to make sure the export does not blow
+                    // up on an instance that upgraded to a Dataverse version
+                    // where a certain primitive has been made multiple, but has
+                    // not yet updated the block.
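+                    // (For example, productionPlace became multiple as of 5.14
+                    // but may still be single-valued on an instance whose
+                    // citation block has not been updated.)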
+                    if (fieldDTO.getMultiple() != null && fieldDTO.getMultiple()) {
+                        return fieldDTO.getMultiplePrimitive();
+                    } else {
+                        return Arrays.asList(fieldDTO.getSinglePrimitive());
+                    }
                 }
             }
         }
@@ -1567,9 +1687,9 @@ private static void saveJsonToDisk(String datasetVersionAsJson) throws IOExcepti
     // plus, the structure of file-level metadata is currently being re-designed, 
     // so we probably should not invest any time into it right now). -- L.A. 4.5
     
-    public static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
+    public static void createDataDscr(XMLStreamWriter xmlw, JsonArray fileDetails) throws XMLStreamException {
 
-        if (datasetVersion.getFileMetadatas() == null || datasetVersion.getFileMetadatas().isEmpty()) {
+        if (fileDetails.isEmpty()) {
             return;
         }
 
@@ -1577,8 +1697,8 @@ public static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVe
 
         // we're not writing the opening <dataDscr> tag until we find an actual 
         // tabular datafile.
-        for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
-            DataFile dataFile = fileMetadata.getDataFile();
+        for (int i=0;i<fileDetails.size();i++) {
+            JsonObject fileJson = fileDetails.getJsonObject(i);
 
             /**
              * Previously (in Dataverse 5.3 and below) the dataDscr section was
@@ -1587,23 +1707,36 @@ public static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVe
              * should instead use the "Data Variable Metadata Access" endpoint.)
              * These days we skip restricted files to avoid this exposure.
              */
-            if (dataFile.isRestricted()|| FileUtil.isActivelyEmbargoed(dataFile)) {
+            if (fileJson.containsKey("restricted") && fileJson.getBoolean("restricted")) {
                 continue;
             }
-
-            if (dataFile != null && dataFile.isTabularData()) {
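+            // A minimal sketch of the per-file JSON shape assumed here, based on
+            // the keys read below (illustrative only):
+            //   "embargo": { "dateAvailable": "2024-01-01" }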
+            if(fileJson.containsKey("embargo")) {
+             String dateString = fileJson.getJsonObject("embargo").getString("dateAvailable");
+             LocalDate endDate = LocalDate.parse(dateString);
+             if (endDate != null && endDate.isAfter(LocalDate.now())) {
+                 //Embargo is active so skip
+                 continue;
+             }
+            }
+        
+            if (fileJson.containsKey("dataTables")) {
                 if (!tabularData) {
                     xmlw.writeStartElement("dataDscr");
                     tabularData = true;
                 }
-                for (VarGroup varGrp : fileMetadata.getVarGroups()) {
-                    createVarGroupDDI(xmlw, varGrp);
+                if(fileJson.containsKey("varGroups")) {
+                    JsonArray varGroups = fileJson.getJsonArray("varGroups");
+                    for (int j=0;j<varGroups.size();j++){
+                        createVarGroupDDI(xmlw, varGroups.getJsonObject(j));
+                    }
                 }
-
-                List<DataVariable> vars = dataFile.getDataTable().getDataVariables();
-
-                for (DataVariable var : vars) {
-                    createVarDDI(xmlw, var, fileMetadata);
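+                // Only the first "dataTables" entry is read here, mirroring the
+                // old getDataTable() (singular) behavior for tabular files.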
+                JsonObject dataTable = fileJson.getJsonArray("dataTables").getJsonObject(0);
+                JsonArray vars = dataTable.getJsonArray("dataVariables");
+                if (vars != null) {
+                    for (int j = 0; j < vars.size(); j++) {
+                        createVarDDI(xmlw, vars.getJsonObject(j), fileJson.getJsonNumber("id").toString(),
+                                fileJson.getJsonNumber("fileMetadataId").toString());
+                    }
                 }
             }
         }
@@ -1612,108 +1745,108 @@ public static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVe
             xmlw.writeEndElement(); // dataDscr
         }
     }
-    private static void createVarGroupDDI(XMLStreamWriter xmlw, VarGroup varGrp) throws XMLStreamException {
+    private static void createVarGroupDDI(XMLStreamWriter xmlw, JsonObject varGrp) throws XMLStreamException {
         xmlw.writeStartElement("varGrp");
-        writeAttribute(xmlw, "ID", "VG" + varGrp.getId().toString());
+        writeAttribute(xmlw, "ID", "VG" + varGrp.getJsonNumber("id").toString());
         String vars = "";
-        Set<DataVariable> varsInGroup = varGrp.getVarsInGroup();
-        for (DataVariable var : varsInGroup) {
-            vars = vars + " v" + var.getId();
+        JsonArray varsInGroup = varGrp.getJsonArray("dataVariableIds");
+        for (int j=0;j<varsInGroup.size();j++){
+            vars = vars + " v" + varsInGroup.getString(j);
         }
         vars = vars.trim();
         writeAttribute(xmlw, "var", vars );
 
 
-        if (!StringUtilisEmpty(varGrp.getLabel())) {
+        if (varGrp.containsKey("label")) {
             xmlw.writeStartElement("labl");
-            xmlw.writeCharacters(varGrp.getLabel());
+            xmlw.writeCharacters(varGrp.getString("label"));
             xmlw.writeEndElement(); // group label (labl)
         }
 
         xmlw.writeEndElement(); //varGrp
     }
     
-    private static void createVarDDI(XMLStreamWriter xmlw, DataVariable dv, FileMetadata fileMetadata) throws XMLStreamException {
+    private static void createVarDDI(XMLStreamWriter xmlw, JsonObject dvar, String fileId, String fileMetadataId) throws XMLStreamException {
         xmlw.writeStartElement("var");
-        writeAttribute(xmlw, "ID", "v" + dv.getId().toString());
-        writeAttribute(xmlw, "name", dv.getName());
-
-        VariableMetadata vm = null;
-        for (VariableMetadata vmIter : dv.getVariableMetadatas()) {
-            FileMetadata fm = vmIter.getFileMetadata();
-            if (fm != null && fm.equals(fileMetadata) ){
-                vm = vmIter;
+        writeAttribute(xmlw, "ID", "v" + dvar.getJsonNumber("id").toString());
+        writeAttribute(xmlw, "name", dvar.getString("name"));
+
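+        // Find the variable metadata entry that belongs to this file, matched by fileMetadataId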
+        JsonObject vm = null;
+        JsonArray vmArray = dvar.getJsonArray("variableMetadata");
+        for (int i = 0; i < vmArray.size(); i++) {
+            JsonObject curVm = vmArray.getJsonObject(i);
+            if (curVm.containsKey("fileMetadataId") && curVm.getString("fileMetadataId").equals(fileMetadataId)) {
+                vm = curVm;
                 break;
             }
         }
 
-        if (dv.getNumberOfDecimalPoints() != null) {
-            writeAttribute(xmlw, "dcml", dv.getNumberOfDecimalPoints().toString());
+        if (dvar.containsKey("numberOfDecimalPoints")) {
+            writeAttribute(xmlw, "dcml", dvar.getJsonNumber("numberOfDecimalPoints").toString());
         }
 
-        if (dv.isOrderedCategorical()) {
+        if (dvar.getBoolean("isOrderedCategorical")) {
             writeAttribute(xmlw, "nature", "ordinal");
         }
 
-        if (dv.getInterval() != null) {
-            String interval = dv.getIntervalLabel();
-            if (interval != null) {
-                writeAttribute(xmlw, "intrvl", interval);
-            }
+        if (dvar.containsKey("variableIntervalType")) {
+            writeAttribute(xmlw, "intrvl", dvar.getString("variableIntervalType"));
         }
 
         if (vm != null) {
-            if (vm.isIsweightvar()) {
+            if (vm.getBoolean("isWeightvar")) {
                 writeAttribute(xmlw, "wgt", "wgt");
             }
-            if (vm.isWeighted() && vm.getWeightvariable() != null) {
-                writeAttribute(xmlw, "wgt-var", "v"+vm.getWeightvariable().getId().toString());
+            if (vm.containsKey("isWeighted") && vm.containsKey("weightVariableId")) {
+                writeAttribute(xmlw, "wgt-var", "v"+vm.getString("weightVariableId"));
             }
         }
 
         // location
         xmlw.writeEmptyElement("location");
-        if (dv.getFileStartPosition() != null) {
-            writeAttribute(xmlw, "StartPos", dv.getFileStartPosition().toString());
+        if (dvar.containsKey("fileStartPosition")) {
+            writeAttribute(xmlw, "StartPos", dvar.getJsonNumber("fileStartPosition").toString());
         }
-        if (dv.getFileEndPosition() != null) {
-            writeAttribute(xmlw, "EndPos", dv.getFileEndPosition().toString());
+        if (dvar.containsKey("fileEndPosition")) {
+            writeAttribute(xmlw, "EndPos", dvar.getJsonNumber("fileEndPosition").toString());
         }
-        if (dv.getRecordSegmentNumber() != null) {
-            writeAttribute(xmlw, "RecSegNo", dv.getRecordSegmentNumber().toString());
+        if (dvar.containsKey("recordSegmentNumber")) {
+            writeAttribute(xmlw, "RecSegNo", dvar.getJsonNumber("recordSegmentNumber").toString());
         }
 
-        writeAttribute(xmlw, "fileid", "f" + dv.getDataTable().getDataFile().getId().toString());
+        writeAttribute(xmlw, "fileid", "f" + fileId);
 
         // labl
-        if ((vm == null || StringUtilisEmpty(vm.getLabel())) && !StringUtilisEmpty(dv.getLabel())) {
-            xmlw.writeStartElement("labl");
-            writeAttribute(xmlw, "level", "variable");
-            xmlw.writeCharacters(dv.getLabel());
-            xmlw.writeEndElement(); //labl
-        } else if (vm != null && !StringUtilisEmpty(vm.getLabel())) {
+        if ((vm == null || !vm.containsKey("label"))) {
+            if(dvar.containsKey("label")) {
+                xmlw.writeStartElement("labl");
+                writeAttribute(xmlw, "level", "variable");
+                xmlw.writeCharacters(dvar.getString("label"));
+                xmlw.writeEndElement(); //labl
+            }
+        } else {
             xmlw.writeStartElement("labl");
             writeAttribute(xmlw, "level", "variable");
-            xmlw.writeCharacters(vm.getLabel());
+            xmlw.writeCharacters(vm.getString("label"));
             xmlw.writeEndElement(); //labl
         }
 
         if (vm != null) {
-            if (!StringUtilisEmpty(vm.getLiteralquestion()) || !StringUtilisEmpty(vm.getInterviewinstruction()) || !StringUtilisEmpty(vm.getPostquestion())) {
+            if (vm.containsKey("literalQuestion") || vm.containsKey("interviewInstruction") || vm.containsKey("postQuestion")) {
                 xmlw.writeStartElement("qstn");
-                if (!StringUtilisEmpty(vm.getLiteralquestion())) {
+                if (vm.containsKey("literalQuestion")) {
                     xmlw.writeStartElement("qstnLit");
-                    xmlw.writeCharacters(vm.getLiteralquestion());
+                    xmlw.writeCharacters(vm.getString("literalQuestion"));
                     xmlw.writeEndElement(); // qstnLit
                 }
-                if (!StringUtilisEmpty(vm.getInterviewinstruction())) {
+                if (vm.containsKey("interviewInstruction")) {
                     xmlw.writeStartElement("ivuInstr");
-                    xmlw.writeCharacters(vm.getInterviewinstruction());
+                    xmlw.writeCharacters(vm.getString("interviewInstruction"));
                     xmlw.writeEndElement(); //ivuInstr
                 }
-                if (!StringUtilisEmpty(vm.getPostquestion())) {
+                if (vm.containsKey("postQuestion")) {
                     xmlw.writeStartElement("postQTxt");
-                    xmlw.writeCharacters(vm.getPostquestion());
+                    xmlw.writeCharacters(vm.getString("postQuestion"));
                     xmlw.writeEndElement(); //ivuInstr
                 }
                 xmlw.writeEndElement(); //qstn
@@ -1721,137 +1854,150 @@ private static void createVarDDI(XMLStreamWriter xmlw, DataVariable dv, FileMeta
         }
 
         // invalrng
-        boolean invalrngAdded = false;
-        for (VariableRange range : dv.getInvalidRanges()) {
-            //if (range.getBeginValueType() != null && range.getBeginValueType().getName().equals(DB_VAR_RANGE_TYPE_POINT)) {
-            if (range.getBeginValueType() != null && range.isBeginValueTypePoint()) {
-                if (range.getBeginValue() != null) {
+        if (dvar.containsKey("invalidRanges")) {
+            boolean invalrngAdded = false;
+            JsonArray ranges = dvar.getJsonArray("invalidRanges");
+            for (int i = 0; i < ranges.size(); i++) {
+                JsonObject range = ranges.getJsonObject(i);
+                // if (range.getBeginValueType() != null &&
+                // range.getBeginValueType().getName().equals(DB_VAR_RANGE_TYPE_POINT)) {
+                if (range.getBoolean("hasBeginValueType") && range.getBoolean("isBeginValueTypePoint")) {
+                    if (range.containsKey("beginValue")) {
+                        invalrngAdded = checkParentElement(xmlw, "invalrng", invalrngAdded);
+                        xmlw.writeEmptyElement("item");
+                        writeAttribute(xmlw, "VALUE", range.getString("beginValue"));
+                    }
+                } else {
                     invalrngAdded = checkParentElement(xmlw, "invalrng", invalrngAdded);
-                    xmlw.writeEmptyElement("item");
-                    writeAttribute(xmlw, "VALUE", range.getBeginValue());
-                }
-            } else {
-                invalrngAdded = checkParentElement(xmlw, "invalrng", invalrngAdded);
-                xmlw.writeEmptyElement("range");
-                if (range.getBeginValueType() != null && range.getBeginValue() != null) {
-                    if (range.isBeginValueTypeMin()) {
-                        writeAttribute(xmlw, "min", range.getBeginValue());
-                    } else if (range.isBeginValueTypeMinExcl()) {
-                        writeAttribute(xmlw, "minExclusive", range.getBeginValue());
+                    xmlw.writeEmptyElement("range");
+                    if (range.getBoolean("hasBeginValueType") && range.containsKey("beginValue")) {
+                        if (range.getBoolean("isBeginValueTypeMin")) {
+                            writeAttribute(xmlw, "min", range.getString("beginValue"));
+                        } else if (range.getBoolean("isBeginValueTypeMinExcl")) {
+                            writeAttribute(xmlw, "minExclusive", range.getString("beginValue"));
+                        }
                     }
-                }
-                if (range.getEndValueType() != null && range.getEndValue() != null) {
-                    if (range.isEndValueTypeMax()) {
-                        writeAttribute(xmlw, "max", range.getEndValue());
-                    } else if (range.isEndValueTypeMaxExcl()) {
-                        writeAttribute(xmlw, "maxExclusive", range.getEndValue());
+                    if (range.getBoolean("hasEndValueType") && range.containsKey("endValue")) {
+                        if (range.getBoolean("isEndValueTypeMax")) {
+                            writeAttribute(xmlw, "max", range.getString("endValue"));
+                        } else if (range.getBoolean("isEndValueTypeMaxExcl")) {
+                            writeAttribute(xmlw, "maxExclusive", range.getString("endValue"));
+                        }
                     }
                 }
             }
-        }
-        if (invalrngAdded) {
-            xmlw.writeEndElement(); // invalrng
+            if (invalrngAdded) {
+                xmlw.writeEndElement(); // invalrng
+            }
         }
 
         //universe
         if (vm != null) {
-            if (!StringUtilisEmpty(vm.getUniverse())) {
+            if (vm.containsKey("universe")) {
                 xmlw.writeStartElement("universe");
-                xmlw.writeCharacters(vm.getUniverse());
+                xmlw.writeCharacters(vm.getString("universe"));
                 xmlw.writeEndElement(); //universe
             }
         }
 
-        //sum stats
-        for (SummaryStatistic sumStat : dv.getSummaryStatistics()) {
-            xmlw.writeStartElement("sumStat");
-            if (sumStat.getTypeLabel() != null) {
-                writeAttribute(xmlw, "type", sumStat.getTypeLabel());
-            } else {
-                writeAttribute(xmlw, "type", "unknown");
+        // sum stats
+        if (dvar.containsKey("summaryStatistics")) {
+            for (Entry<String, JsonValue> sumStat : dvar.getJsonObject("summaryStatistics").entrySet()) {
+                xmlw.writeStartElement("sumStat");
+                writeAttribute(xmlw, "type", sumStat.getKey());
+                xmlw.writeCharacters(((JsonString)sumStat.getValue()).getString());
+                xmlw.writeEndElement(); // sumStat
             }
-            xmlw.writeCharacters(sumStat.getValue());
-            xmlw.writeEndElement(); //sumStat
         }
 
         // categories
-        for (VariableCategory cat : dv.getCategories()) {
-            xmlw.writeStartElement("catgry");
-            if (cat.isMissing()) {
-                writeAttribute(xmlw, "missing", "Y");
-            }
-
-            // catValu
-            xmlw.writeStartElement("catValu");
-            xmlw.writeCharacters(cat.getValue());
-            xmlw.writeEndElement(); //catValu
+        if (dvar.containsKey("variableCategories")) {
+            JsonArray varCats = dvar.getJsonArray("variableCategories");
+            for (int i = 0; i < varCats.size(); i++) {
+                JsonObject varCat = varCats.getJsonObject(i);
+                xmlw.writeStartElement("catgry");
+                if (varCat.getBoolean("isMissing")) {
+                    writeAttribute(xmlw, "missing", "Y");
+                }
 
-            // label
-            if (!StringUtilisEmpty(cat.getLabel())) {
-                xmlw.writeStartElement("labl");
-                writeAttribute(xmlw, "level", "category");
-                xmlw.writeCharacters(cat.getLabel());
-                xmlw.writeEndElement(); //labl
-            }
+                // catValu
+                xmlw.writeStartElement("catValu");
+                xmlw.writeCharacters(varCat.getString("value"));
+                xmlw.writeEndElement(); // catValu
+
+                // label
+                if (varCat.containsKey("label")) {
+                    xmlw.writeStartElement("labl");
+                    writeAttribute(xmlw, "level", "category");
+                    xmlw.writeCharacters(varCat.getString("label"));
+                    xmlw.writeEndElement(); // labl
+                }
 
-            // catStat
-            if (cat.getFrequency() != null) {
-                xmlw.writeStartElement("catStat");
-                writeAttribute(xmlw, "type", "freq");
-                // if frequency is actually a long value, we want to write "100" instead of "100.0"
-                if (Math.floor(cat.getFrequency()) == cat.getFrequency()) {
-                    xmlw.writeCharacters(new Long(cat.getFrequency().longValue()).toString());
-                } else {
-                    xmlw.writeCharacters(cat.getFrequency().toString());
+                // catStat
+                if (varCat.containsKey("frequency")) {
+                    xmlw.writeStartElement("catStat");
+                    writeAttribute(xmlw, "type", "freq");
+                    Double freq = varCat.getJsonNumber("frequency").doubleValue();
+                    // if frequency is actually a long value, we want to write "100" instead of
+                    // "100.0"
+                    if (Math.floor(freq) == freq) {
+                        xmlw.writeCharacters(Long.valueOf(freq.longValue()).toString());
+                    } else {
+                        xmlw.writeCharacters(freq.toString());
+                    }
+                    xmlw.writeEndElement(); // catStat
                 }
-                xmlw.writeEndElement(); //catStat
-            }
 
-            //catStat weighted freq
-            if (vm != null && vm.isWeighted()) {
-                for (CategoryMetadata cm : vm.getCategoriesMetadata()) {
-                    if (cm.getCategory().getValue().equals(cat.getValue())) {
-                        xmlw.writeStartElement("catStat");
-                        writeAttribute(xmlw, "wgtd", "wgtd");
-                        writeAttribute(xmlw, "type", "freq");
-                        xmlw.writeCharacters(cm.getWfreq().toString());
-                        xmlw.writeEndElement(); //catStat
-                        break;
+                // catStat weighted freq
+                if (vm != null && vm.getBoolean("isWeighted")) {
+                    JsonArray catMetas = vm.getJsonArray("categoryMetadatas");
+                    for (int j = 0; j < catMetas.size(); j++) {
+                        JsonObject cm = catMetas.getJsonObject(j);
+                        if (cm.getString("categoryValue").equals(varCat.getString("value"))) {
+                            xmlw.writeStartElement("catStat");
+                            writeAttribute(xmlw, "wgtd", "wgtd");
+                            writeAttribute(xmlw, "type", "freq");
+                            xmlw.writeCharacters(cm.getJsonNumber("wFreq").toString());
+                            xmlw.writeEndElement(); // catStat
+                            break;
+                        }
                     }
                 }
-            }
 
-            xmlw.writeEndElement(); //catgry
+                xmlw.writeEndElement(); // catgry
+            }
         }
 
 
         // varFormat
         xmlw.writeEmptyElement("varFormat");
-        if (dv.isTypeNumeric()) {
-            writeAttribute(xmlw, "type", "numeric");
-        } else if (dv.isTypeCharacter()) {
-            writeAttribute(xmlw, "type", "character");
+        if(dvar.containsKey("variableFormatType")) {
+            writeAttribute(xmlw, "type", dvar.getString("variableFormatType").toLowerCase());
         } else {
             throw new XMLStreamException("Illegal Variable Format Type!");
         }
-        writeAttribute(xmlw, "formatname", dv.getFormat());
+        if(dvar.containsKey("format")) {
+            writeAttribute(xmlw, "formatname", dvar.getString("format"));
+        }
         //experiment writeAttribute(xmlw, "schema", dv.getFormatSchema());
-        writeAttribute(xmlw, "category", dv.getFormatCategory());
+        if(dvar.containsKey("formatCategory")) {
+            writeAttribute(xmlw, "category", dvar.getString("formatCategory"));
+        }
 
         // notes
-        if (dv.getUnf() != null && !"".equals(dv.getUnf())) {
+        if (dvar.containsKey("UNF") && !dvar.getString("UNF").isBlank()) {
             xmlw.writeStartElement("notes");
             writeAttribute(xmlw, "subject", "Universal Numeric Fingerprint");
             writeAttribute(xmlw, "level", "variable");
             writeAttribute(xmlw, "type", "Dataverse:UNF");
-            xmlw.writeCharacters(dv.getUnf());
+            xmlw.writeCharacters(dvar.getString("UNF"));
             xmlw.writeEndElement(); //notes
         }
 
         if (vm != null) {
-            if (!StringUtilisEmpty(vm.getNotes())) {
+            if (vm.containsKey("notes")) {
                 xmlw.writeStartElement("notes");
-                xmlw.writeCData(vm.getNotes());
+                xmlw.writeCData(vm.getString("notes"));
                 xmlw.writeEndElement(); //notes CDATA
             }
         }
@@ -1862,40 +2008,45 @@ private static void createVarDDI(XMLStreamWriter xmlw, DataVariable dv, FileMeta
 
     }
     
-    private static void createFileDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
+    private static void createFileDscr(XMLStreamWriter xmlw, JsonArray fileDetails) throws XMLStreamException {
         String dataverseUrl = SystemConfig.getDataverseSiteUrlStatic();
-        for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
-            DataFile dataFile = fileMetadata.getDataFile();
-
-            if (dataFile != null && dataFile.isTabularData()) {
-                DataTable dt = dataFile.getDataTable();
+        for (int i =0;i<fileDetails.size();i++) {
+            JsonObject fileJson = fileDetails.getJsonObject(i);
+            //originalFileFormat is one of several keys that only exist for tabular data
+            if (fileJson.containsKey("originalFileFormat")) {
+                JsonObject dt = null;
+                if (fileJson.containsKey("dataTables")) {
+                    dt = fileJson.getJsonArray("dataTables").getJsonObject(0);
+                }
                 xmlw.writeStartElement("fileDscr");
-                writeAttribute(xmlw, "ID", "f" + dataFile.getId());
-                writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + dataFile.getId());
+                String fileId = fileJson.getJsonNumber("id").toString();
+                writeAttribute(xmlw, "ID", "f" + fileId);
+                writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileId);
 
                 xmlw.writeStartElement("fileTxt");
                 xmlw.writeStartElement("fileName");
-                xmlw.writeCharacters(fileMetadata.getLabel());
+                xmlw.writeCharacters(fileJson.getString("filename"));
                 xmlw.writeEndElement(); // fileName
 
-                if (dt.getCaseQuantity() != null || dt.getVarQuantity() != null || dt.getRecordsPerCase() != null) {
+                if (dt != null && (dt.containsKey("caseQuantity") || dt.containsKey("varQuantity")
+                        || dt.containsKey("recordsPerCase"))) {
                     xmlw.writeStartElement("dimensns");
 
-                    if (dt.getCaseQuantity() != null) {
+                    if (dt.containsKey("caseQuantity")) {
                         xmlw.writeStartElement("caseQnty");
-                        xmlw.writeCharacters(dt.getCaseQuantity().toString());
+                        xmlw.writeCharacters(dt.getJsonNumber("caseQuantity").toString());
                         xmlw.writeEndElement(); // caseQnty
                     }
 
-                    if (dt.getVarQuantity() != null) {
+                    if (dt.containsKey("varQuantity")) {
                         xmlw.writeStartElement("varQnty");
-                        xmlw.writeCharacters(dt.getVarQuantity().toString());
+                        xmlw.writeCharacters(dt.getJsonNumber("varQuantity").toString());
                         xmlw.writeEndElement(); // varQnty
                     }
 
-                    if (dt.getRecordsPerCase() != null) {
+                    if (dt.containsKey("recordsPerCase")) {
                         xmlw.writeStartElement("recPrCas");
-                        xmlw.writeCharacters(dt.getRecordsPerCase().toString());
+                        xmlw.writeCharacters(dt.getJsonNumber("recordsPerCase").toString());
                         xmlw.writeEndElement(); // recPrCas
                     }
 
@@ -1903,7 +2054,7 @@ private static void createFileDscr(XMLStreamWriter xmlw, DatasetVersion datasetV
                 }
 
                 xmlw.writeStartElement("fileType");
-                xmlw.writeCharacters(dataFile.getContentType());
+                xmlw.writeCharacters(fileJson.getString("contentType"));
                 xmlw.writeEndElement(); // fileType
 
                 xmlw.writeEndElement(); // fileTxt
@@ -1911,22 +2062,23 @@ private static void createFileDscr(XMLStreamWriter xmlw, DatasetVersion datasetV
                 // various notes:
                 // this specially formatted note section is used to store the UNF
                 // (Universal Numeric Fingerprint) signature:
-                if (dt.getUnf() != null && !dt.getUnf().equals("")) {
+                if ((dt!=null) && (dt.containsKey("UNF") && !dt.getString("UNF").isBlank())) {
                     xmlw.writeStartElement("notes");
                     writeAttribute(xmlw, "level", LEVEL_FILE);
                     writeAttribute(xmlw, "type", NOTE_TYPE_UNF);
                     writeAttribute(xmlw, "subject", NOTE_SUBJECT_UNF);
-                    xmlw.writeCharacters(dt.getUnf());
+                    xmlw.writeCharacters(dt.getString("UNF"));
                     xmlw.writeEndElement(); // notes
                 }
 
-                if (dataFile.getTags() != null) {
-                    for (int i = 0; i < dataFile.getTags().size(); i++) {
+                if (fileJson.containsKey("tabularTags")) {
+                    JsonArray tags = fileJson.getJsonArray("tabularTags");
+                    for (int j = 0; j < tags.size(); j++) {
                         xmlw.writeStartElement("notes");
                         writeAttribute(xmlw, "level", LEVEL_FILE);
                         writeAttribute(xmlw, "type", NOTE_TYPE_TAG);
                         writeAttribute(xmlw, "subject", NOTE_SUBJECT_TAG);
-                        xmlw.writeCharacters(dataFile.getTags().get(i).getTypeLabel());
+                        xmlw.writeCharacters(tags.getString(j));
                         xmlw.writeEndElement(); // notes
                     }
                 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java
index 149c6791a7e..d0cd8a4cae7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilHelper.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.export.ddi;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
index 4409d2340b1..6b7cb844f3e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtil.java
@@ -14,17 +14,16 @@
 import edu.harvard.iq.dataverse.api.dto.LicenseDTO;
 import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
-import edu.harvard.iq.dataverse.license.License;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
-import java.io.ByteArrayOutputStream;
+
 import java.io.OutputStream;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
@@ -102,12 +101,12 @@ private static void createDC(XMLStreamWriter xmlw, DatasetDTO datasetDto, String
         String persistentAgency = datasetDto.getProtocol();
         String persistentAuthority = datasetDto.getAuthority();
         String persistentId = datasetDto.getIdentifier();
-        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId);
+        GlobalId globalId = PidUtil.parseAsGlobalID(persistentAgency, persistentAuthority, persistentId);
   
         writeFullElement(xmlw, dcFlavor+":"+"title", dto2Primitive(version, DatasetFieldConstant.title));                       
         
         xmlw.writeStartElement(dcFlavor+":"+"identifier");
-        xmlw.writeCharacters(globalId.toURL().toString());
+        xmlw.writeCharacters(globalId.asURL());
         xmlw.writeEndElement(); // decterms:identifier       
 
         writeAuthorsElement(xmlw, version, dcFlavor);
@@ -160,12 +159,12 @@ private static void createOAIDC(XMLStreamWriter xmlw, DatasetDTO datasetDto, Str
         String persistentAgency = datasetDto.getProtocol();
         String persistentAuthority = datasetDto.getAuthority();
         String persistentId = datasetDto.getIdentifier();
-        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId);
+        GlobalId globalId = PidUtil.parseAsGlobalID(persistentAgency, persistentAuthority, persistentId);
   
         writeFullElement(xmlw, dcFlavor+":"+"title", dto2Primitive(version, DatasetFieldConstant.title));                       
         
         xmlw.writeStartElement(dcFlavor+":"+"identifier");
-        xmlw.writeCharacters(globalId.toURL().toString());
+        xmlw.writeCharacters(globalId.asURL());
         xmlw.writeEndElement(); // decterms:identifier       
 
         writeAuthorsElement(xmlw, version, dcFlavor); //creator
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/Cleanup.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/Cleanup.java
deleted file mode 100644
index 508f441bc03..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/Cleanup.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package edu.harvard.iq.dataverse.export.openaire;
-
-import org.apache.commons.lang3.StringUtils;
-
-/**
- *
- * @author francesco.cadili@4science.it
- */
-public class Cleanup {
-    
-    /**
-     * Normalize sentence
-     *
-     * @param sentence full name or organization name
-     * @return normalize string value
-     */
-    static public String normalize(String sentence) {
-        if (StringUtils.isBlank(sentence)) {
-            return "";
-        }
-
-        sentence = sentence.trim()
-                .replaceAll(", *", ", ")
-                .replaceAll(" +", " ");
-
-        return sentence;
-    }
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
index bea3858a60e..7b0a92a4372 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/export/openaire/OpenAireExportUtil.java
@@ -1,14 +1,10 @@
 package edu.harvard.iq.dataverse.export.openaire;
 
 import java.io.OutputStream;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.logging.Logger;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLOutputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamWriter;
@@ -17,18 +13,21 @@
 
 import com.google.gson.Gson;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldConstant;
 import edu.harvard.iq.dataverse.GlobalId;
-import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
 import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
 import edu.harvard.iq.dataverse.api.dto.FieldDTO;
 import edu.harvard.iq.dataverse.api.dto.MetadataBlockDTO;
+import edu.harvard.iq.dataverse.util.PersonOrOrgUtil;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
 
 public class OpenAireExportUtil {
 
@@ -70,7 +69,7 @@ private static void createOpenAire(XMLStreamWriter xmlw, DatasetDTO datasetDto)
         String persistentAgency = datasetDto.getProtocol();
         String persistentAuthority = datasetDto.getAuthority();
         String persistentId = datasetDto.getIdentifier();
-        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId);
+        GlobalId globalId = PidUtil.parseAsGlobalID(persistentAgency, persistentAuthority, persistentId);
 
         // The sequence is revied using sample:
         // https://schema.datacite.org/meta/kernel-4.0/example/datacite-example-full-v4.0.xml
@@ -82,7 +81,7 @@ private static void createOpenAire(XMLStreamWriter xmlw, DatasetDTO datasetDto)
         String language = null;
 
         // 1, Identifier (with mandatory type sub-property) (M)
-        writeIdentifierElement(xmlw, globalId.toURL().toString(), language);
+        writeIdentifierElement(xmlw, globalId.asURL(), language);
 
         // 2, Creator (with optional given name, family name, 
         //      name identifier and affiliation sub-properties) (M)
@@ -190,10 +189,10 @@ public static void writeIdentifierElement(XMLStreamWriter xmlw, String identifie
         if (StringUtils.isNotBlank(identifier)) {
             Map<String, String> identifier_map = new HashMap<String, String>();
 
-            if (StringUtils.containsIgnoreCase(identifier, GlobalId.DOI_RESOLVER_URL)) {
+            if (StringUtils.containsIgnoreCase(identifier, DOIServiceBean.DOI_RESOLVER_URL)) {
                 identifier_map.put("identifierType", "DOI");
                 identifier = StringUtils.substring(identifier, identifier.indexOf("10."));
-            } else if (StringUtils.containsIgnoreCase(identifier, GlobalId.HDL_RESOLVER_URL)) {
+            } else if (StringUtils.containsIgnoreCase(identifier, HandlenetServiceBean.HDL_RESOLVER_URL)) {
                 identifier_map.put("identifierType", "Handle");
                 if (StringUtils.contains(identifier, "http")) {
                     identifier = identifier.replace(identifier.substring(0, identifier.indexOf("/") + 2), "");
@@ -249,72 +248,26 @@ public static void writeCreatorsElement(XMLStreamWriter xmlw, DatasetVersionDTO
                             if (StringUtils.isNotBlank(creatorName)) {
                                 creator_check = writeOpenTag(xmlw, "creators", creator_check);
                                 xmlw.writeStartElement("creator"); // <creator>
-
-                                boolean nameType_check = false;
+                                
                                 Map<String, String> creator_map = new HashMap<String, String>();
-                                if ((StringUtils.containsIgnoreCase(nameIdentifierScheme, "orcid"))) {
+                                JsonObject creatorObj = PersonOrOrgUtil.getPersonOrOrganization(creatorName, false,
+                                        StringUtils.containsIgnoreCase(nameIdentifierScheme, "orcid"));
+
+                                // creatorName=<FamilyName>, <FirstName>
+                                if (creatorObj.getBoolean("isPerson")) {
                                     creator_map.put("nameType", "Personal");
-                                    nameType_check = true;
-                                }
-                                // ToDo - the algorithm to determine if this is a Person or Organization here
-                                // has been abstracted into a separate
-                                // edu.harvard.iq.dataverse.util.PersonOrOrgUtil class that could be used here
-                                // to avoid duplication/variants of the algorithm
-                                creatorName = Cleanup.normalize(creatorName);
-                                // Datacite algorithm, https://github.com/IQSS/dataverse/issues/2243#issuecomment-358615313
-                                if (creatorName.contains(",")) {
-                                    String givenName = FirstNames.getInstance().getFirstName(creatorName);
-                                    boolean isOrganization = Organizations.getInstance().isOrganization(creatorName);
-
-                                    // creatorName=<FamilyName>, <FirstName>
-                                    if (givenName != null && !isOrganization) {
-                                        // givenName ok
-                                        creator_map.put("nameType", "Personal");
-                                        nameType_check = true;
-                                    } else if (isOrganization) {
-                                        creator_map.put("nameType", "Organizational");
-                                        nameType_check = false;
-                                    }
-                                    writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language);
-
-                                    if ((nameType_check) && (!creatorName.replaceFirst(",", "").contains(","))) {
-                                        // creatorName=<FamilyName>, <FirstName>
-                                        String[] fullName = creatorName.split(", ");
-                                        if (fullName.length == 2) {
-                                            givenName = fullName[1];
-                                            String familyName = fullName[0];
-
-                                            writeFullElement(xmlw, null, "givenName", null, givenName, language);
-                                            writeFullElement(xmlw, null, "familyName", null, familyName, language);
-                                        } else {
-                                            // It's possible to get here if "Smith," is entered as an author name.
-                                            logger.info("Unable to write givenName and familyName based on creatorName '" + creatorName + "'.");
-                                        }
-                                    }
                                 } else {
-                                    String givenName = FirstNames.getInstance().getFirstName(creatorName);
-                                    boolean isOrganization = Organizations.getInstance().isOrganization(creatorName);
-                                    
-                                    if (givenName != null && !isOrganization) {
-                                        // givenName ok, creatorName=<FirstName> <FamilyName>
-                                        creator_map.put("nameType", "Personal");
-                                        nameType_check = true;
-                                        writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language);
-
-                                        String familyName = "";
-                                        if (givenName.length() + 1 < creatorName.length()) {
-                                            familyName = creatorName.substring(givenName.length() + 1);
-                                        }
-
-                                        writeFullElement(xmlw, null, "givenName", null, givenName, language);
-                                        writeFullElement(xmlw, null, "familyName", null, familyName, language);
-                                    } else {
-                                        // default
-                                        if (isOrganization) {
-                                            creator_map.put("nameType", "Organizational");
-                                        }
-                                        writeFullElement(xmlw, null, "creatorName", creator_map, creatorName, language);
-                                    }
+                                    creator_map.put("nameType", "Organizational");
+                                }
+                                writeFullElement(xmlw, null, "creatorName", creator_map,
+                                        creatorObj.getString("fullName"), language);
+                                if (creatorObj.containsKey("givenName")) {
+                                    writeFullElement(xmlw, null, "givenName", null, creatorObj.getString("givenName"),
+                                            language);
+                                }
+                                if (creatorObj.containsKey("familyName")) {
+                                    writeFullElement(xmlw, null, "familyName", null, creatorObj.getString("familyName"),
+                                            language);
                                 }
 
                                 if (StringUtils.isNotBlank(nameIdentifier)) {
@@ -368,12 +321,34 @@ public static void writeTitlesElement(XMLStreamWriter xmlw, DatasetVersionDTO da
         String subtitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.subTitle);
         title_check = writeTitleElement(xmlw, "Subtitle", subtitle, title_check, language);
 
-        String alternativeTitle = dto2Primitive(datasetVersionDTO, DatasetFieldConstant.alternativeTitle);
-        title_check = writeTitleElement(xmlw, "AlternativeTitle", alternativeTitle, title_check, language);
-
+        title_check = writeMultipleTitleElement(xmlw, "AlternativeTitle", datasetVersionDTO, "citation", title_check, language);
         writeEndTag(xmlw, title_check);
     }
 
+    private static boolean writeMultipleTitleElement(XMLStreamWriter xmlw, String titleType, DatasetVersionDTO datasetVersionDTO, String metadataBlockName, boolean title_check, String language) throws XMLStreamException {
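+        // Writes a title element of the given titleType for each value of the matching (possibly multi-valued) field in the named metadata block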
+        MetadataBlockDTO block = datasetVersionDTO.getMetadataBlocks().get(metadataBlockName);
+        if (block != null) {
+            logger.fine("Block is not empty");
+            List<FieldDTO> fieldsBlock =  block.getFields();
+            if (fieldsBlock != null) {
+                for (FieldDTO fieldDTO : fieldsBlock) {
+                    logger.fine(titleType + " " + fieldDTO.getTypeName());
+                    if (titleType.toLowerCase().equals(fieldDTO.getTypeName().toLowerCase())) {
+                        logger.fine("Found Alt title");
+                        List<String> fields = fieldDTO.getMultiplePrimitive();
+                        for (String value : fields) {
+                            title_check = writeTitleElement(xmlw, titleType, value, title_check, language);
+                        }
+                        break;
+                    }
+                }
+            }
+        }
+
+        return title_check;
+    }
+
     /**
      * 3, Title (with optional type sub-properties) (M)
      *
@@ -709,61 +684,23 @@ public static void writeContributorElement(XMLStreamWriter xmlw, String contribu
         boolean nameType_check = false;
         Map<String, String> contributor_map = new HashMap<String, String>();
 
-        // ToDo - the algorithm to determine if this is a Person or Organization here
-        // has been abstracted into a separate
-        // edu.harvard.iq.dataverse.util.PersonOrOrgUtil class that could be used here
-        // to avoid duplication/variants of the algorithm
-
-        contributorName = Cleanup.normalize(contributorName);
-        // Datacite algorithm, https://github.com/IQSS/dataverse/issues/2243#issuecomment-358615313
-        if (contributorName.contains(",")) {
-            String givenName = FirstNames.getInstance().getFirstName(contributorName);
-            boolean isOrganization = Organizations.getInstance().isOrganization(contributorName);
+        JsonObject contributorObj = PersonOrOrgUtil.getPersonOrOrganization(contributorName,
+                ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName)), false);
 
-            // contributorName=<FamilyName>, <FirstName>
-            if (givenName != null && !isOrganization) {
-                // givenName ok
+        if (contributorObj.getBoolean("isPerson")) {
+            if(contributorObj.containsKey("givenName")) {
                 contributor_map.put("nameType", "Personal");
-                nameType_check = true;
-                // re: the above toDo - the ("ContactPerson".equals(contributorType) &&
-                // !isValidEmailAddress(contributorName)) clause in the next line could/should
-                // be sent as the OrgIfTied boolean parameter
-            } else if (isOrganization || ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName))) {
-                contributor_map.put("nameType", "Organizational");
-            }
-            writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language);
-
-            if ((nameType_check) && (!contributorName.replaceFirst(",", "").contains(","))) {
-                // contributorName=<FamilyName>, <FirstName>
-                String[] fullName = contributorName.split(", ");
-                givenName = fullName[1];
-                String familyName = fullName[0];
-
-                writeFullElement(xmlw, null, "givenName", null, givenName, language);
-                writeFullElement(xmlw, null, "familyName", null, familyName, language);
             }
         } else {
-            String givenName = FirstNames.getInstance().getFirstName(contributorName);
-            boolean isOrganization = Organizations.getInstance().isOrganization(contributorName);
-
-            if (givenName != null && !isOrganization) {
-                contributor_map.put("nameType", "Personal");
-                writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language);
-
-                String familyName = "";
-                if (givenName.length() + 1 < contributorName.length()) {
-                    familyName = contributorName.substring(givenName.length() + 1);
-                }
+            contributor_map.put("nameType", "Organizational");
+        }
+        writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language);
 
-                writeFullElement(xmlw, null, "givenName", null, givenName, language);
-                writeFullElement(xmlw, null, "familyName", null, familyName, language);
-            } else {
-                // default
-                if (isOrganization || ("ContactPerson".equals(contributorType) && !isValidEmailAddress(contributorName))) {
-                    contributor_map.put("nameType", "Organizational");
-                }
-                writeFullElement(xmlw, null, "contributorName", contributor_map, contributorName, language);
-            }
+        if (contributorObj.containsKey("givenName")) {
+            writeFullElement(xmlw, null, "givenName", null, contributorObj.getString("givenName"), language);
+        }
+        if (contributorObj.containsKey("familyName")) {
+            writeFullElement(xmlw, null, "familyName", null, contributorObj.getString("familyName"), language);
         }
 
         if (StringUtils.isNotBlank(contributorAffiliation)) {
@@ -1263,26 +1200,17 @@ public static void writeDescriptionsElement(XMLStreamWriter xmlw, DatasetVersion
                     if (DatasetFieldConstant.series.equals(fieldDTO.getTypeName())) {
                         // String seriesName = null;
                         String seriesInformation = null;
-
-                        Set<FieldDTO> fieldDTOs = fieldDTO.getSingleCompound();
-                        for (Iterator<FieldDTO> iterator = fieldDTOs.iterator(); iterator.hasNext();) {
-                            FieldDTO next = iterator.next();
-                            /*if (DatasetFieldConstant.seriesName.equals(next.getTypeName())) {
-                                seriesName =  next.getSinglePrimitive();
-                            }*/
-                            if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) {
-                                seriesInformation = next.getSinglePrimitive();
+                        for (HashSet<FieldDTO> fieldDTOs : fieldDTO.getMultipleCompound()) {
+                            for (Iterator<FieldDTO> iterator = fieldDTOs.iterator(); iterator.hasNext();) {
+                                FieldDTO next = iterator.next();
+                                if (DatasetFieldConstant.seriesInformation.equals(next.getTypeName())) {
+                                    seriesInformation = next.getSinglePrimitive();
+                                }
+                            }
+                            if (StringUtils.isNotBlank(seriesInformation)) {
+                                description_check = writeOpenTag(xmlw, "descriptions", description_check);
+                                writeDescriptionElement(xmlw, "SeriesInformation", seriesInformation, language);
                             }
-                        }
-
-                        /*if (StringUtils.isNotBlank(seriesName)){
-                        	contributor_check = writeOpenTag(xmlw, "descriptions", description_check);
-                        	
-                        	writeDescriptionElement(xmlw, "SeriesInformation", seriesName);
-                        }*/
-                        if (StringUtils.isNotBlank(seriesInformation)) {
-                            description_check = writeOpenTag(xmlw, "descriptions", description_check);
-                            writeDescriptionElement(xmlw, "SeriesInformation", seriesInformation, language);
                         }
                     }
                 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/spi/Exporter.java b/src/main/java/edu/harvard/iq/dataverse/export/spi/Exporter.java
deleted file mode 100644
index e44265946f6..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/export/spi/Exporter.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package edu.harvard.iq.dataverse.export.spi;
-
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.export.ExportException;
-import java.io.OutputStream;
-import javax.json.JsonObject;
-import javax.ws.rs.core.MediaType;
-
-/**
- *
- * @author skraffmi
- */
-public interface Exporter {
-    
-    /* When implementing exportDataset, when done writing content, please make sure to flush() the outputStream, 
-       but NOT close() it!
-       This way an exporter can be used to insert the produced metadata into the 
-       body of an HTTP response, etc. (for example, to insert it into the body 
-       of an OAI response, where more XML needs to be written, for the outer 
-       OAI-PMH record). -- L.A.  4.5
-    */
-    //public void exportDataset(JsonObject json, OutputStream outputStream) throws ExportException;
-    
-    public void exportDataset(DatasetVersion version, JsonObject json, OutputStream outputStream) throws ExportException;
-    
-    public String getProviderName();
-    
-    public String getDisplayName();
-    
-    public Boolean isXMLFormat();
-    
-    public Boolean isHarvestable();
-    
-    public Boolean isAvailableToUsers();
-    
-    /* These should throw an ExportException if called on an Exporter that is not isXMLFormat(): */
-    public String getXMLNameSpace() throws ExportException;
-    public String getXMLSchemaLocation() throws ExportException; 
-    public String getXMLSchemaVersion() throws ExportException; 
-    
-    public void setParam(String name, Object value);
-
-	public default String getMediaType() {
-	    return MediaType.APPLICATION_XML;
-	};
-    
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
index 0a238eb5198..7f1f46c06cb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalTool.java
@@ -6,20 +6,20 @@
 import java.io.Serializable;
 import java.util.Arrays;
 import java.util.List;
-import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToMany;
+
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToMany;
 
 /**
  * A specification or definition for how an external tool is intended to
@@ -148,7 +148,8 @@ public enum Type {
 
         EXPLORE("explore"),
         CONFIGURE("configure"),
-        PREVIEW("preview");
+        PREVIEW("preview"),
+        QUERY("query");
 
         private final String text;
 
@@ -238,14 +239,30 @@ public void setExternalToolTypes(List<ExternalToolType> externalToolTypes) {
     }
 
     public boolean isExploreTool() {
-        boolean isExploreTool = false;
         for (ExternalToolType externalToolType : externalToolTypes) {
             if (externalToolType.getType().equals(Type.EXPLORE)) {
-                isExploreTool = true;
-                break;
+                return true;
+            }
+        }
+        return false;
+    }
+    
+    public boolean isQueryTool() {
+        for (ExternalToolType externalToolType : externalToolTypes) {
+            if (externalToolType.getType().equals(Type.QUERY)) {
+                return true;
+            }
+        }
+        return false;
+    }
+    
+    public boolean isPreviewTool() {
+        for (ExternalToolType externalToolType : externalToolTypes) {
+            if (externalToolType.getType().equals(Type.PREVIEW)) {
+                return true;
             }
         }
-        return isExploreTool;
+        return false;
     }
 
     public Scope getScope() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
index 88a51017b75..e7ae451cacf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java
@@ -22,18 +22,16 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonNumber;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.ws.rs.HttpMethod;
+import jakarta.json.JsonNumber;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.ws.rs.HttpMethod;
 
 import org.apache.commons.codec.binary.StringUtils;
 
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST;
+
 /**
  * Handles an operation on a specific file. Requires a file id in order to be
  * instantiated. Applies logic based on an {@link ExternalTool} specification,
@@ -41,15 +39,10 @@
  */
 public class ExternalToolHandler extends URLTokenUtil {
 
-    private final ExternalTool externalTool;
+    public final ExternalTool externalTool;
 
     private String requestMethod;
-    
-    public static final String HTTP_METHOD="httpMethod";
-    public static final String TIMEOUT="timeOut";
-    public static final String SIGNED_URL="signedUrl";
-    public static final String NAME="name";
-    public static final String URL_TEMPLATE="urlTemplate";
+
     
 
     /**
@@ -110,7 +103,8 @@ public String handleRequest(boolean preview) {
                 switch (externalTool.getScope()) {
                 case DATASET:
                     callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/"
-                            + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId();
+                            + dataset.getId() + "/versions/" + DS_VERSION_LATEST + "/toolparams/" + externalTool.getId();
+                    break;
                 case FILE:
                     callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/"
                             + dataFile.getId() + "/metadata/" + fileMetadata.getId() + "/toolparams/"
@@ -133,12 +127,12 @@ public String handleRequest(boolean preview) {
 
         } else {
             // ToDo - if the allowedApiCalls() are defined, could/should we send them to
-            // tools using GET as well?
+            // tools using POST as well?
 
             if (requestMethod.equals(HttpMethod.POST)) {
-                String body = JsonUtil.prettyPrint(createPostBody(params).build());
+                String body = JsonUtil.prettyPrint(createPostBody(params, null).build());
                 try {
-                    logger.info("POST Body: " + body);
+                    logger.fine("POST Body: " + body);
                     return postFormData(body);
                 } catch (IOException | InterruptedException ex) {
                     Logger.getLogger(ExternalToolHandler.class.getName()).log(Level.SEVERE, null, ex);
@@ -148,60 +142,6 @@ public String handleRequest(boolean preview) {
         return null;
     }
 
-    public JsonObject getParams(JsonObject toolParameters) {
-        //ToDo - why an array of object each with a single key/value pair instead of one object?
-        JsonArray queryParams = toolParameters.getJsonArray("queryParameters");
-
-        // ToDo return json and print later
-        JsonObjectBuilder paramsBuilder = Json.createObjectBuilder();
-        if (!(queryParams == null) && !queryParams.isEmpty()) {
-            queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> {
-                queryParam.keySet().forEach((key) -> {
-                    String value = queryParam.getString(key);
-                    JsonValue param = getParam(value);
-                    if (param != null) {
-                        paramsBuilder.add(key, param);
-                    }
-                });
-            });
-        }
-        return paramsBuilder.build();
-    }
-
-    public JsonObjectBuilder createPostBody(JsonObject params) {
-        JsonObjectBuilder bodyBuilder = Json.createObjectBuilder();
-        bodyBuilder.add("queryParameters", params);
-        String apiCallStr = externalTool.getAllowedApiCalls();
-        if (apiCallStr != null && !apiCallStr.isBlank()) {
-            JsonArray apiArray = JsonUtil.getJsonArray(externalTool.getAllowedApiCalls());
-            JsonArrayBuilder apisBuilder = Json.createArrayBuilder();
-            apiArray.getValuesAs(JsonObject.class).forEach(((apiObj) -> {
-                logger.fine(JsonUtil.prettyPrint(apiObj));
-                String name = apiObj.getJsonString(NAME).getString();
-                String httpmethod = apiObj.getJsonString(HTTP_METHOD).getString();
-                int timeout = apiObj.getInt(TIMEOUT);
-                String urlTemplate = apiObj.getJsonString(URL_TEMPLATE).getString();
-                logger.fine("URL Template: " + urlTemplate);
-                urlTemplate = SystemConfig.getDataverseSiteUrlStatic() + urlTemplate;
-                String apiPath = replaceTokensWithValues(urlTemplate);
-                logger.fine("URL WithTokens: " + apiPath);
-                String url = apiPath;
-                // Sign if apiToken exists, otherwise send unsigned URL (i.e. for guest users)
-                ApiToken apiToken = getApiToken();
-                if (apiToken != null) {
-                    url = UrlSignerUtil.signUrl(apiPath, timeout, apiToken.getAuthenticatedUser().getUserIdentifier(),
-                            httpmethod, JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("")
-                                    + getApiToken().getTokenString());
-                }
-                logger.fine("Signed URL: " + url);
-                apisBuilder.add(Json.createObjectBuilder().add(NAME, name).add(HTTP_METHOD, httpmethod)
-                        .add(SIGNED_URL, url).add(TIMEOUT, timeout));
-            }));
-            bodyBuilder.add("signedUrls", apisBuilder);
-        }
-        return bodyBuilder;
-    }
-
     private String postFormData(String allowedApis) throws IOException, InterruptedException {
         String url = null;
         HttpClient client = HttpClient.newHttpClient();
@@ -252,4 +192,11 @@ public String getExploreScript() {
         logger.fine("Exploring with " + toolUrl);
         return getScriptForUrl(toolUrl);
     }
+
+    // TODO: Consider merging with getExploreScript
+    public String getConfigureScript() {
+        String toolUrl = this.getToolUrlWithQueryParams();
+        logger.fine("Configuring with " + toolUrl);
+        return getScriptForUrl(toolUrl);
+    }
 }
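Note: the added break above fixes a switch fall-through: previously a DATASET-scoped tool would also execute the FILE branch and overwrite its callback URL. A minimal, self-contained sketch of the pattern follows; the enum, IDs, and base URL are hypothetical placeholders, and the ":latest" literal stands in for the DS_VERSION_LATEST constant used in the real code.

// Sketch only - not the Dataverse classes.
public class CallbackUrlSketch {

    enum Scope { DATASET, FILE }

    static String callbackFor(Scope scope, long datasetId, long fileId, long toolId) {
        String base = "https://demo.example.org"; // assumed site URL
        String callback = null;
        switch (scope) {
            case DATASET:
                callback = base + "/api/v1/datasets/" + datasetId
                        + "/versions/:latest/toolparams/" + toolId;
                break; // without this break, DATASET would fall through and be overwritten by the FILE branch
            case FILE:
                callback = base + "/api/v1/files/" + fileId + "/toolparams/" + toolId;
                break;
        }
        return callback;
    }

    public static void main(String[] args) {
        System.out.println(callbackFor(Scope.DATASET, 42L, 7L, 1L)); // stays a dataset callback
    }
}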
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
index f38cd7301ee..e13843eadfa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBean.java
@@ -6,34 +6,31 @@
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool.Type;
-import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.URLTokenUtil.ReservedWord;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.externaltools.ExternalTool.Scope;
 
-import java.io.StringReader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 import static edu.harvard.iq.dataverse.externaltools.ExternalTool.*;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import javax.ejb.EJB;
-import javax.json.JsonValue;
+
+import jakarta.ejb.EJB;
+import jakarta.json.JsonValue;
 
 @Stateless
 @Named
@@ -62,7 +59,7 @@ public List<ExternalTool> findDatasetToolsByType(Type type) {
     }
 
     /**
-     * @param type explore, configure or preview
+     * @param type explore, configure, preview, or query
      * @return A list of tools or an empty list.
      */
     public List<ExternalTool> findFileToolsByType(Type type) {
@@ -71,7 +68,7 @@ public List<ExternalTool> findFileToolsByType(Type type) {
     }
 
     /**
-     * @param type explore, configure or preview
+     * @param type explore, configure, preview, or query
      * @param contentType file content type (MIME type)
      * @return A list of tools or an empty list.
      */
diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java
index 3564d1871b5..fb4c0f5dc5d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java
+++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolType.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.externaltools;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 @Entity
 @Table(indexes = {
diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java
index e8677869496..c1162eb8db6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java
+++ b/src/main/java/edu/harvard/iq/dataverse/feedback/Feedback.java
@@ -1,18 +1,22 @@
 package edu.harvard.iq.dataverse.feedback;
 
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+
+import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 
 public class Feedback {
 
     private final String fromEmail;
     private final String toEmail;
+    private final String ccEmail;
     private final String subject;
     private final String body;
 
-    public Feedback(String fromEmail, String toEmail, String subject, String body) {
+    public Feedback(String fromEmail, String toEmail, String ccEmail, String subject, String body) {
         this.fromEmail = fromEmail;
         this.toEmail = toEmail;
+        this.ccEmail=ccEmail;
         this.subject = subject;
         this.body = body;
     }
@@ -24,6 +28,10 @@ public String getFromEmail() {
     public String getToEmail() {
         return toEmail;
     }
+    
+    public String getCcEmail() {
+        return ccEmail;
+    }
 
     public String getSubject() {
         return subject;
@@ -35,13 +43,14 @@ public String getBody() {
 
     @Override
     public String toString() {
-        return "Feedback{" + "fromEmail=" + fromEmail + ", toEmail=" + toEmail + ", subject=" + subject + ", body=" + body + '}';
+        return "Feedback{" + "fromEmail=" + fromEmail + ", toEmail=" + toEmail + ", ccEmail=" + ccEmail + ", subject=" + subject + ", body=" + body + '}';
     }
 
     public JsonObjectBuilder toJsonObjectBuilder() {
-        return Json.createObjectBuilder()
+        return new NullSafeJsonBuilder()
                 .add("fromEmail", fromEmail)
                 .add("toEmail", toEmail)
+                .add("ccEmail", ccEmail)
                 .add("subject", subject)
                 .add("body", body);
     }
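For context, a minimal sketch of how a caller might use the widened constructor; the addresses are made up, and it assumes (as the change above suggests) that NullSafeJsonBuilder simply omits null values, so a missing ccEmail does not break serialization.

import edu.harvard.iq.dataverse.feedback.Feedback;
import edu.harvard.iq.dataverse.util.json.JsonUtil;

public class FeedbackCcSketch {
    public static void main(String[] args) {
        // Addresses are hypothetical. Argument order follows the new constructor:
        // (fromEmail, toEmail, ccEmail, subject, body).
        Feedback withCc = new Feedback("user@example.org", "contact@example.org",
                "support@example.org", "Question about a dataset", "Hello ...");
        // A null ccEmail is tolerated because toJsonObjectBuilder() now uses
        // NullSafeJsonBuilder, which (by assumption here) skips null values.
        Feedback withoutCc = new Feedback("user@example.org", "contact@example.org",
                null, "Question about a dataset", "Hello ...");

        System.out.println(JsonUtil.prettyPrint(withCc.toJsonObjectBuilder().build()));
        System.out.println(JsonUtil.prettyPrint(withoutCc.toJsonObjectBuilder().build()));
    }
}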
diff --git a/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java b/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java
index 8b23d68f4b7..6ae0e165141 100644
--- a/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/feedback/FeedbackUtil.java
@@ -10,11 +10,15 @@
 import edu.harvard.iq.dataverse.DataverseSession;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import edu.harvard.iq.dataverse.util.PersonOrOrgUtil;
+
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.mail.internet.InternetAddress;
+
+import jakarta.json.JsonObject;
+import jakarta.mail.internet.InternetAddress;
 
 public class FeedbackUtil {
 
@@ -22,87 +26,118 @@ public class FeedbackUtil {
 
     private static final String NO_DATASET_CONTACT_INTRO = BundleUtil.getStringFromBundle("contact.context.dataset.noContact");
 
-    // TODO: consider changing "recipient" into an object called something like FeedbackTarget
-    public static List<Feedback> gatherFeedback(DvObject recipient, DataverseSession dataverseSession, String messageSubject, String userMessage, InternetAddress systemAddress, String userEmail, String dataverseSiteUrl, String installationBrandName, String supportTeamName) {
+    public static Feedback gatherFeedback(DvObject feedbackTarget, DataverseSession dataverseSession, String messageSubject, String userMessage, InternetAddress systemAddress, String userEmail, String dataverseSiteUrl, String installationBrandName, String supportTeamName, boolean ccSupport) {
         String systemEmail = null;
         if (systemAddress != null) {
             systemEmail = systemAddress.getAddress();
         }
         logger.fine("systemAddress: " + systemAddress);
-        List<Feedback> feedbacks = new ArrayList<>();
+        Feedback feedback = null;
         if (isLoggedIn(dataverseSession)) {
             userEmail = loggedInUserEmail(dataverseSession);
         }
-        if (recipient != null) {
+        String contextIntro;
+        String contextEnding;
+        String contactEmails;
+        String ccEmails = ccSupport ? systemEmail : null;
+
+        if (feedbackTarget != null) {
             messageSubject = BundleUtil.getStringFromBundle("contact.context.subject.dvobject", Arrays.asList(installationBrandName, messageSubject));
-            if (recipient.isInstanceofDataverse()) {
-                Dataverse dataverse = (Dataverse) recipient;
-                String dataverseContextEnding = BundleUtil.getStringFromBundle("contact.context.dataverse.ending", Arrays.asList(supportTeamName, systemEmail, dataverseSiteUrl, dataverse.getAlias(), supportTeamName, systemEmail));
-                List<DvObjectContact> dataverseContacts = getDataverseContacts(dataverse);
-                for (DvObjectContact dataverseContact : dataverseContacts) {
-                    String placeHolderIfDataverseContactsGetNames = "";
-                    String dataverseContextIntro = BundleUtil.getStringFromBundle("contact.context.dataverse.intro", Arrays.asList(placeHolderIfDataverseContactsGetNames, userEmail, installationBrandName, dataverse.getAlias()));
-                    Feedback feedback = new Feedback(userEmail, dataverseContact.getEmail(), messageSubject, dataverseContextIntro + userMessage + dataverseContextEnding);
-                    feedbacks.add(feedback);
+
+            String contactGreeting;
+
+            if (feedbackTarget.isInstanceofDataverse()) {
+                // Dataverse target
+                Dataverse dataverse = (Dataverse) feedbackTarget;
+                contextEnding = BundleUtil.getStringFromBundle("contact.context.dataverse.ending", Arrays.asList(supportTeamName, systemEmail, dataverseSiteUrl, dataverse.getAlias(), supportTeamName, systemEmail));
+                List<DvObjectContact> contacts = getDataverseContacts(dataverse);
+                List<String> contactEmailList = new ArrayList<String>();
+                for (DvObjectContact contact : contacts) {
+                    contactEmailList.add(contact.getEmail());
                 }
-                if (!feedbacks.isEmpty()) {
-                    return feedbacks;
+                if (!contactEmailList.isEmpty()) {
+                    contactEmails = String.join(",", contactEmailList);
+                    // Dataverse contacts do not have a name, just an email address
+                    contactGreeting = "";
+                    contextIntro = BundleUtil.getStringFromBundle("contact.context.dataverse.intro", Arrays.asList(contactGreeting, userEmail, installationBrandName, dataverse.getAlias()));
                 } else {
-                    String dataverseContextIntroError = BundleUtil.getStringFromBundle("contact.context.dataverse.noContact");
-                    Feedback feedback = new Feedback(userEmail, systemEmail, messageSubject, dataverseContextIntroError + userMessage + dataverseContextEnding);
-                    feedbacks.add(feedback);
-                    return feedbacks;
+                    // No contacts
+                    contextIntro = BundleUtil.getStringFromBundle("contact.context.dataverse.noContact");
+                    contactEmails = systemEmail;
+                    ccEmails = null;
                 }
-            } else if (recipient.isInstanceofDataset()) {
-                Dataset dataset = (Dataset) recipient;
+            } else if (feedbackTarget.isInstanceofDataset()) {
+                // Dataset target
+                Dataset dataset = (Dataset) feedbackTarget;
                 String datasetTitle = dataset.getLatestVersion().getTitle();
-                String datasetPid = dataset.getGlobalIdString();
-                String datasetContextEnding = BundleUtil.getStringFromBundle("contact.context.dataset.ending", Arrays.asList(supportTeamName, systemEmail, dataverseSiteUrl, dataset.getGlobalIdString(), supportTeamName, systemEmail));
-                List<DvObjectContact> datasetContacts = getDatasetContacts(dataset);
-                for (DvObjectContact datasetContact : datasetContacts) {
-                    String contactFullName = getGreeting(datasetContact);
-                    String datasetContextIntro = BundleUtil.getStringFromBundle("contact.context.dataset.intro", Arrays.asList(contactFullName, userEmail, installationBrandName, datasetTitle, datasetPid));
-                    Feedback feedback = new Feedback(userEmail, datasetContact.getEmail(), messageSubject, datasetContextIntro + userMessage + datasetContextEnding);
-                    feedbacks.add(feedback);
+                String datasetPid = dataset.getGlobalId().asString();
+                contextEnding = BundleUtil.getStringFromBundle("contact.context.dataset.ending", Arrays.asList(supportTeamName, systemEmail, dataverseSiteUrl, datasetPid, supportTeamName, systemEmail));
+                List<DvObjectContact> contacts = getDatasetContacts(dataset);
+                List<String> contactEmailList = new ArrayList<String>();
+                List<String> contactNameList = new ArrayList<String>();
+
+                for (DvObjectContact contact : contacts) {
+                    String name = getContactName(contact);
+                    if (name != null) {
+                        contactNameList.add(name);
+                    }
+                    contactEmailList.add(contact.getEmail());
                 }
-                if (!feedbacks.isEmpty()) {
-                    return feedbacks;
+                if (!contactEmailList.isEmpty()) {
+                    contactEmails = String.join(",", contactEmailList);
+                    contactGreeting = getGreeting(contactNameList);
+
+                    contextIntro = BundleUtil.getStringFromBundle("contact.context.dataset.intro", Arrays.asList(contactGreeting, userEmail, installationBrandName, datasetTitle, datasetPid));
                 } else {
-                    // TODO: Add more of an intro for the person receiving the system email in this "no dataset contact" scenario?
-                    Feedback feedback = new Feedback(userEmail, systemEmail, messageSubject, NO_DATASET_CONTACT_INTRO + userMessage + datasetContextEnding);
-                    feedbacks.add(feedback);
-                    return feedbacks;
+                    // No contacts
+                    // TODO: Add more of an intro for the person receiving the system email in this
+                    // "no dataset contact" scenario?
+                    contextIntro = NO_DATASET_CONTACT_INTRO;
+                    contactEmails = systemEmail;
+                    ccEmails = null;
                 }
             } else {
-                DataFile datafile = (DataFile) recipient;
+                // DataFile target
+                DataFile datafile = (DataFile) feedbackTarget;
                 String datasetTitle = datafile.getOwner().getLatestVersion().getTitle();
-                String datasetPid = datafile.getOwner().getGlobalIdString();
+                String datasetPid = datafile.getOwner().getGlobalId().asString();
                 String filename = datafile.getFileMetadatas().get(0).getLabel();
-                List<DvObjectContact> datasetContacts = getDatasetContacts(datafile.getOwner());
-                String fileContextEnding = BundleUtil.getStringFromBundle("contact.context.file.ending", Arrays.asList(supportTeamName, systemEmail, dataverseSiteUrl, datafile.getId().toString(), supportTeamName, systemEmail));
-                for (DvObjectContact datasetContact : datasetContacts) {
-                    String contactFullName = getGreeting(datasetContact);
-                    String fileContextIntro = BundleUtil.getStringFromBundle("contact.context.file.intro", Arrays.asList(contactFullName, userEmail, installationBrandName, filename, datasetTitle, datasetPid));
-                    Feedback feedback = new Feedback(userEmail, datasetContact.getEmail(), messageSubject, fileContextIntro + userMessage + fileContextEnding);
-                    feedbacks.add(feedback);
+                List<DvObjectContact> contacts = getDatasetContacts(datafile.getOwner());
+                contextEnding = BundleUtil.getStringFromBundle("contact.context.file.ending", Arrays.asList(supportTeamName, systemEmail, dataverseSiteUrl, datafile.getId().toString(), supportTeamName, systemEmail));
+                List<String> contactEmailList = new ArrayList<String>();
+                List<String> contactNameList = new ArrayList<String>();
+
+                for (DvObjectContact contact : contacts) {
+                    String name = getContactName(contact);
+                    if (name != null) {
+                        contactNameList.add(name);
+                    }
+                    contactEmailList.add(contact.getEmail());
                 }
-                if (!feedbacks.isEmpty()) {
-                    return feedbacks;
+                if (!contactEmailList.isEmpty()) {
+                    contactEmails = String.join(",", contactEmailList);
+                    contactGreeting = getGreeting(contactNameList);
+
+                    contextIntro = BundleUtil.getStringFromBundle("contact.context.file.intro", Arrays.asList(contactGreeting, userEmail, installationBrandName, filename, datasetTitle, datasetPid));
                 } else {
-                    // TODO: Add more of an intro for the person receiving the system email in this "no dataset contact" scenario?
-                    Feedback feedback = new Feedback(userEmail, systemEmail, messageSubject, NO_DATASET_CONTACT_INTRO + userMessage + fileContextEnding);
-                    feedbacks.add(feedback);
-                    return feedbacks;
+                    // No contacts
+                    // TODO: Add more of an intro for the person receiving the system email in this
+                    // "no dataset contact" scenario?
+                    contextIntro = NO_DATASET_CONTACT_INTRO;
+                    contactEmails = systemEmail;
+                    ccEmails = null;
                 }
             }
         } else {
+            // No target
             messageSubject = BundleUtil.getStringFromBundle("contact.context.subject.support", Arrays.asList(installationBrandName, messageSubject));
-            String noDvObjectContextIntro = BundleUtil.getStringFromBundle("contact.context.support.intro", Arrays.asList(supportTeamName, userEmail));
-            String noDvObjectContextEnding = BundleUtil.getStringFromBundle("contact.context.support.ending", Arrays.asList(""));
-            Feedback feedback = new Feedback(userEmail, systemEmail, messageSubject, noDvObjectContextIntro + userMessage + noDvObjectContextEnding);
-            feedbacks.add(feedback);
-            return feedbacks;
+            contextIntro = BundleUtil.getStringFromBundle("contact.context.support.intro", Arrays.asList(supportTeamName, userEmail));
+            contextEnding = BundleUtil.getStringFromBundle("contact.context.support.ending", Arrays.asList(""));
+            contactEmails = systemEmail;
+            ccEmails = null;
         }
+        feedback = new Feedback(userEmail, contactEmails, ccEmails, messageSubject, contextIntro + userMessage + contextEnding);
+        return feedback;
     }
 
     private static boolean isLoggedIn(DataverseSession dataverseSession) {
@@ -156,30 +191,53 @@ private static List<DvObjectContact> getDatasetContacts(Dataset dataset) {
     }
 
     /**
-     * When contacts are people we suggest that they be stored as "Simpson,
-     * Homer" so the idea of this method is that it returns "Homer Simpson", if
-     * it can.
+     * When contacts are people we suggest that they be stored as "Simpson, Homer"
+     * so the idea of this method is that it returns "Homer Simpson", if it can.
      *
      * Contacts don't necessarily have to be people, however. They can be
-     * organizations. We ran into similar trouble (but for authors) when
-     * implementing Schema.org JSON-LD support. See getJsonLd on DatasetVersion.
-     * Some day it might be nice to store whether an author or a contact is a
-     * person or an organization.
+     * organizations. This method uses the PersonOrOrgUtil algorithm to detect
+     * whether an entry is a Person or an Organization and relies on it to create a
+     * full name, i.e. removing the comma and reversing the order of names for a
+     * Person while leaving the string unchanged for an Organization.
+     */
+    private static String getContactName(DvObjectContact dvObjectContact) {
+        String contactName = dvObjectContact.getName();
+        String name = null;
+        if (contactName != null) {
+            JsonObject entity = PersonOrOrgUtil.getPersonOrOrganization(contactName, false, false);
+            if (entity.getBoolean("isPerson") && entity.containsKey("givenName") && entity.containsKey("familyName")) {
+                name = entity.getString("givenName") + " " + entity.getString("familyName");
+            } else {
+                name = entity.getString("fullName");
+            }
+        }
+        return name;
+
+    }
+
+    /**
+     * Concatenates names using commas and a final 'and', and creates the greeting
+     * string, e.g. "Hello Homer Simpson, Bart Simpson, and Marge Simpson"
      */
-    private static String getGreeting(DvObjectContact dvObjectContact) {
-        logger.fine("dvObjectContact: " + dvObjectContact);
-        try {
-            String name = dvObjectContact.getName();
-            logger.fine("dvObjectContact name: " + name);
-            String lastFirstString = dvObjectContact.getName();
-            String[] lastFirstParts = lastFirstString.split(",");
-            String last = lastFirstParts[0];
-            String first = lastFirstParts[1];
-            return BundleUtil.getStringFromBundle("contact.context.dataset.greeting.helloFirstLast", Arrays.asList(first.trim(), last.trim()));
-        } catch (Exception ex) {
-            logger.warning("problem in getGreeting: " + ex);
+    private static String getGreeting(List<String> contactNameList) {
+        int size = contactNameList.size();
+        String nameString;
+        String finalName = null;
+        // Treat the final name separately
+        switch (size) {
+        case 0:
             return BundleUtil.getStringFromBundle("contact.context.dataset.greeting.organization");
+        case 1:
+            nameString = contactNameList.get(0);
+            break;
+        case 2:
+            nameString = contactNameList.get(0) + " and " + contactNameList.get(1);
+            break;
+        default:
+            finalName = contactNameList.remove(size - 1);
+            nameString = String.join(", ", contactNameList) + ", and " + finalName;
         }
+        return BundleUtil.getStringFromBundle("contact.context.dataset.greeting.helloFirstLast", Arrays.asList(nameString));
     }
 
 }
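A standalone sketch of the name-joining behavior introduced above, with the bundle lookup left out so the 0/1/2/many cases are easy to try; the names are the same illustrative ones used in the Javadoc.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class GreetingSketch {

    // Mirrors the joining logic in getGreeting() above; the real method wraps the
    // result in a localized bundle string instead of returning it directly.
    static String joinNames(List<String> names) {
        List<String> list = new ArrayList<>(names); // copy, since the logic removes the final name
        switch (list.size()) {
            case 0:
                return ""; // the real code falls back to a generic "organization" greeting here
            case 1:
                return list.get(0);
            case 2:
                return list.get(0) + " and " + list.get(1);
            default:
                String last = list.remove(list.size() - 1);
                return String.join(", ", list) + ", and " + last;
        }
    }

    public static void main(String[] args) {
        System.out.println(joinNames(Arrays.asList("Homer Simpson", "Bart Simpson", "Marge Simpson")));
        // -> Homer Simpson, Bart Simpson, and Marge Simpson
    }
}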
diff --git a/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java b/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java
index 71b53bd43f2..39bc46216ca 100644
--- a/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/flyway/StartupFlywayMigrator.java
@@ -2,12 +2,12 @@
 
 import org.flywaydb.core.Flyway;
 
-import javax.annotation.PostConstruct;
-import javax.annotation.Resource;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
-import javax.ejb.TransactionManagement;
-import javax.ejb.TransactionManagementType;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.Resource;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
+import jakarta.ejb.TransactionManagement;
+import jakarta.ejb.TransactionManagementType;
 import javax.sql.DataSource;
 
 @Startup
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
index 877fc68e4a1..c93e2c6aa94 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/AccessToken.java
@@ -46,7 +46,7 @@ String getRefreshToken() {
         return refreshToken;
     }
 
-    ArrayList<AccessToken> getOtherTokens() {
+    public ArrayList<AccessToken> getOtherTokens() {
         return otherTokens;
     }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java
new file mode 100644
index 00000000000..7e555935e2e
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusEndpoint.java
@@ -0,0 +1,38 @@
+package edu.harvard.iq.dataverse.globus;
+
+public class GlobusEndpoint {
+
+    private String id;
+    private String clientToken;
+    private String basePath;
+
+    public GlobusEndpoint(String id, String clientToken, String basePath) {
+        this.id = id;
+        this.clientToken = clientToken;
+        this.basePath = basePath;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    public String getClientToken() {
+        return clientToken;
+    }
+
+    public void setClientToken(String clientToken) {
+        this.clientToken = clientToken;
+    }
+
+    public String getBasePath() {
+        return basePath;
+    }
+
+    public void setBasePath(String basePath) {
+        this.basePath = basePath;
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
index 9d80c5cc280..d0660a55a6a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java
@@ -1,22 +1,29 @@
 package edu.harvard.iq.dataverse.globus;
 
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.Scheduler;
 import com.google.gson.FieldNamingPolicy;
 import com.google.gson.GsonBuilder;
 import edu.harvard.iq.dataverse.*;
-
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonPatch;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonPatch;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.json.stream.JsonParsingException;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.HttpMethod;
 
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
 import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray;
@@ -29,6 +36,8 @@
 import java.net.URLEncoder;
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
+import java.time.Duration;
+import java.time.temporal.ChronoUnit;
 import java.util.*;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
@@ -40,17 +49,26 @@
 import java.util.stream.Collectors;
 import java.util.stream.IntStream;
 
+import org.apache.commons.codec.binary.StringUtils;
+import org.primefaces.PrimeFaces;
+
 import com.google.gson.Gson;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.authorization.users.User;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.URLTokenUtil;
+import edu.harvard.iq.dataverse.util.UrlSignerUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
 @Stateless
@@ -59,197 +77,243 @@ public class GlobusServiceBean implements java.io.Serializable {
 
     @EJB
     protected DatasetServiceBean datasetSvc;
-
     @EJB
     protected SettingsServiceBean settingsSvc;
-
     @Inject
     DataverseSession session;
-
     @EJB
     protected AuthenticationServiceBean authSvc;
-
     @EJB
     EjbDataverseEngine commandEngine;
-
     @EJB
     UserNotificationServiceBean userNotificationService;
+    @EJB
+    PrivateUrlServiceBean privateUrlService;
+    @EJB
+    FileDownloadServiceBean fileDownloadService;
+    @EJB
+    DataFileServiceBean dataFileService;
 
     private static final Logger logger = Logger.getLogger(GlobusServiceBean.class.getCanonicalName());
     private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss");
 
-    private String code;
-    private String userTransferToken;
-    private String state;
-
-    public String getState() {
-        return state;
-    }
-
-    public void setState(String state) {
-        this.state = state;
-    }
-
-    public String getCode() {
-        return code;
-    }
-
-    public void setCode(String code) {
-        this.code = code;
-    }
+    private String getRuleId(GlobusEndpoint endpoint, String principal, String permissions)
+            throws MalformedURLException {
 
-    public String getUserTransferToken() {
-        return userTransferToken;
-    }
+        String principalType = "identity";
 
-    public void setUserTransferToken(String userTransferToken) {
-        this.userTransferToken = userTransferToken;
-    }
-
-    ArrayList<String> checkPermisions(AccessToken clientTokenUser, String directory, String globusEndpoint,
-            String principalType, String principal) throws MalformedURLException {
-        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access_list");
-        MakeRequestResponse result = makeRequest(url, "Bearer",
-                clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null);
-        ArrayList<String> ids = new ArrayList<String>();
+        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access_list");
+        MakeRequestResponse result = makeRequest(url, "Bearer", endpoint.getClientToken(), "GET", null);
         if (result.status == 200) {
             AccessList al = parseJson(result.jsonResponse, AccessList.class, false);
 
             for (int i = 0; i < al.getDATA().size(); i++) {
                 Permissions pr = al.getDATA().get(i);
-                if ((pr.getPath().equals(directory + "/") || pr.getPath().equals(directory))
+
+                if ((pr.getPath().equals(endpoint.getBasePath() + "/") || pr.getPath().equals(endpoint.getBasePath()))
                         && pr.getPrincipalType().equals(principalType)
-                        && ((principal == null) || (principal != null && pr.getPrincipal().equals(principal)))) {
-                    ids.add(pr.getId());
+                        && ((principal == null) || (principal != null && pr.getPrincipal().equals(principal)))
+                        && pr.getPermissions().equals(permissions)) {
+                    return pr.getId();
                 } else {
-                    logger.info(pr.getPath() + " === " + directory + " == " + pr.getPrincipalType());
+                    logger.fine(pr.getPath() + " === " + endpoint.getBasePath() + " == " + pr.getPrincipalType());
                     continue;
                 }
             }
         }
-
-        return ids;
+        return null;
     }
 
-    public void updatePermision(AccessToken clientTokenUser, String directory, String principalType, String perm)
-            throws MalformedURLException {
-        if (directory != null && !directory.equals("")) {
-            directory = directory + "/";
-        }
-        logger.info("Start updating permissions." + " Directory is " + directory);
-        String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
-        ArrayList<String> rules = checkPermisions(clientTokenUser, directory, globusEndpoint, principalType, null);
-        logger.info("Size of rules " + rules.size());
-        int count = 0;
-        while (count < rules.size()) {
-            logger.info("Start removing rules " + rules.get(count));
-            Permissions permissions = new Permissions();
-            permissions.setDATA_TYPE("access");
-            permissions.setPermissions(perm);
-            permissions.setPath(directory);
-
-            Gson gson = new GsonBuilder().create();
-            URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/"
-                    + rules.get(count));
-            logger.info("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/"
-                    + rules.get(count));
-            MakeRequestResponse result = makeRequest(url, "Bearer",
-                    clientTokenUser.getOtherTokens().get(0).getAccessToken(), "PUT", gson.toJson(permissions));
-            if (result.status != 200) {
-                logger.warning("Cannot update access rule " + rules.get(count));
-            } else {
-                logger.info("Access rule " + rules.get(count) + " was updated");
-            }
-            count++;
-        }
-    }
-
-    public void deletePermision(String ruleId, Logger globusLogger) throws MalformedURLException {
-
+    /**
+     * Call to delete a Globus rule related to the specified dataset.
+     * 
+     * @param ruleId       - Globus rule id - assumed to be associated with the
+     *                     dataset's file path (should not be called with a
+     *                     user-specified rule id without further checking)
+     * @param dataset      - the dataset associated with the rule
+     * @param globusLogger - a separate logger instance to log to (must not be null)
+     */
+    public void deletePermission(String ruleId, Dataset dataset, Logger globusLogger) {
+        globusLogger.info("Start deleting rule " + ruleId + " for dataset " + dataset.getId());
         if (ruleId.length() > 0) {
-            AccessToken clientTokenUser = getClientToken();
-            globusLogger.info("Start deleting permissions.");
-            String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
-
-            URL url = new URL(
-                    "https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/" + ruleId);
-            MakeRequestResponse result = makeRequest(url, "Bearer",
-                    clientTokenUser.getOtherTokens().get(0).getAccessToken(), "DELETE", null);
-            if (result.status != 200) {
-                globusLogger.warning("Cannot delete access rule " + ruleId);
-            } else {
-                globusLogger.info("Access rule " + ruleId + " was deleted successfully");
+            if (dataset != null) {
+                GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+                if (endpoint != null) {
+                    String accessToken = endpoint.getClientToken();
+                    globusLogger.info("Start deleting permissions.");
+                    try {
+                        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId()
+                                + "/access/" + ruleId);
+                        MakeRequestResponse result = makeRequest(url, "Bearer", accessToken, "DELETE", null);
+                        if (result.status != 200) {
+                            globusLogger.warning("Cannot delete access rule " + ruleId);
+                        } else {
+                            globusLogger.info("Access rule " + ruleId + " was deleted successfully");
+                        }
+                    } catch (MalformedURLException ex) {
+                        logger.log(Level.WARNING,
+                                "Failed to delete access rule " + ruleId + " on endpoint " + endpoint.getId(), ex);
+                    }
+                }
             }
         }
-
     }
 
-    public int givePermission(String principalType, String principal, String perm, AccessToken clientTokenUser,
-            String directory, String globusEndpoint) throws MalformedURLException {
+    /**
+     * Request read/write access for the specified principal and generate a list of
+     * accessible paths for new files for the specified dataset.
+     * 
+     * @param principal     - the id of the Globus principal doing the transfer
+     * @param dataset       - the dataset the new files will be uploaded to
+     * @param numberOfPaths - how many files are to be transferred
+     * @return - a JSON object with the permission-request status and a map of new storage identifiers to their accessible paths
+     */
+    public JsonObject requestAccessiblePaths(String principal, Dataset dataset, int numberOfPaths) {
 
-        ArrayList<?> rules = checkPermisions(clientTokenUser, directory, globusEndpoint, principalType, principal);
+        GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+        String principalType = "identity";
 
         Permissions permissions = new Permissions();
         permissions.setDATA_TYPE("access");
         permissions.setPrincipalType(principalType);
         permissions.setPrincipal(principal);
-        permissions.setPath(directory + "/");
-        permissions.setPermissions(perm);
+        permissions.setPath(endpoint.getBasePath() + "/");
+        permissions.setPermissions("rw");
+
+        JsonObjectBuilder response = Json.createObjectBuilder();
+        response.add("status", requestPermission(endpoint, dataset, permissions));
+        String driverId = dataset.getEffectiveStorageDriverId();
+        JsonObjectBuilder paths = Json.createObjectBuilder();
+        for (int i = 0; i < numberOfPaths; i++) {
+            String storageIdentifier = DataAccess.getNewStorageIdentifier(driverId);
+            int lastIndex = Math.max(storageIdentifier.lastIndexOf("/"), storageIdentifier.lastIndexOf(":"));
+            paths.add(storageIdentifier, endpoint.getBasePath() + "/" + storageIdentifier.substring(lastIndex + 1));
 
+        }
+        response.add("paths", paths.build());
+        return response.build();
+    }
+
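A small sketch of the identifier-to-path mapping used in requestAccessiblePaths() above; the storage identifier and base path are made-up examples, not values Dataverse guarantees.

public class GlobusPathSketch {

    // Mirrors the substring logic above: the file's path under the endpoint's base
    // path is the part of the storage identifier after the last '/' or ':'.
    static String pathFor(String basePath, String storageIdentifier) {
        int lastIndex = Math.max(storageIdentifier.lastIndexOf("/"), storageIdentifier.lastIndexOf(":"));
        return basePath + "/" + storageIdentifier.substring(lastIndex + 1);
    }

    public static void main(String[] args) {
        // Hypothetical driver-prefixed identifier and base path
        System.out.println(pathFor("/10.5072/FK2/ABC123", "globus://bucket:18b39722a1f"));
        // -> /10.5072/FK2/ABC123/18b39722a1f
    }
}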
+    private int requestPermission(GlobusEndpoint endpoint, Dataset dataset, Permissions permissions) {
         Gson gson = new GsonBuilder().create();
         MakeRequestResponse result = null;
-        if (rules.size() == 0) {
-            logger.info("Start creating the rule");
-            URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access");
-            result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), "POST",
-                    gson.toJson(permissions));
+        logger.info("Start creating the rule");
 
-            if (result.status == 400) {
+        try {
+            URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + endpoint.getId() + "/access");
+            result = makeRequest(url, "Bearer", endpoint.getClientToken(), "POST", gson.toJson(permissions));
+
+            switch (result.status) {
+            case 404:
+                logger.severe("Endpoint " + endpoint.getId() + " was not found");
+                break;
+            case 400:
                 logger.severe("Path " + permissions.getPath() + " is not valid");
-            } else if (result.status == 409) {
+                break;
+            case 409:
                 logger.warning("ACL already exists or Endpoint ACL already has the maximum number of access rules");
+                break;
+            case 201:
+                JsonObject globusResponse = JsonUtil.getJsonObject(result.jsonResponse);
+                if (globusResponse != null && globusResponse.containsKey("access_id")) {
+                    permissions.setId(globusResponse.getString("access_id"));
+                    monitorTemporaryPermissions(permissions.getId(), dataset.getId());
+                    logger.info("Access rule " + permissions.getId() + " was created successfully");
+                } else {
+                    // Shouldn't happen!
+                    logger.warning("Access rule id not returned for dataset " + dataset.getId());
+                }
             }
-
             return result.status;
-        } else {
-            logger.info("Start Updating the rule");
-            URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/access/"
-                    + rules.get(0));
-            result = makeRequest(url, "Bearer", clientTokenUser.getOtherTokens().get(0).getAccessToken(), "PUT",
-                    gson.toJson(permissions));
-
-            if (result.status == 400) {
-                logger.severe("Path " + permissions.getPath() + " is not valid");
-            } else if (result.status == 409) {
-                logger.warning("ACL already exists or Endpoint ACL already has the maximum number of access rules");
-            }
-            logger.info("Result status " + result.status);
+        } catch (MalformedURLException ex) {
+            // Misconfiguration
+            logger.warning("Failed to create access rule URL for " + endpoint.getId());
+            return 500;
         }
-
-        return result.status;
     }
 
-    public boolean getSuccessfulTransfers(AccessToken clientTokenUser, String taskId) throws MalformedURLException {
-
-        URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId
-                + "/successful_transfers");
-
-        MakeRequestResponse result = makeRequest(url, "Bearer",
-                clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null);
+    /**
+     * Given an array of remote files to be referenced in the dataset, create a set
+     * of valid storage identifiers and return a map of the remote file paths to
+     * storage identifiers.
+     * 
+     * @param dataset         - the dataset the remote files will be referenced in
+     * @param referencedFiles - a JSON array of remote files to be referenced in the
+     *                        dataset - each should be a string with the <Globus
+     *                        endpoint>/path/to/file
+     * @return - a map of supplied paths to valid storage identifiers
+     */
+    public JsonObject requestReferenceFileIdentifiers(Dataset dataset, JsonArray referencedFiles) {
+        String driverId = dataset.getEffectiveStorageDriverId();
+        JsonArray endpoints = GlobusAccessibleStore.getReferenceEndpointsWithPaths(driverId);
+
+        JsonObjectBuilder fileMap = Json.createObjectBuilder();
+        referencedFiles.forEach(value -> {
+            if (value.getValueType() != ValueType.STRING) {
+                throw new JsonParsingException("ReferencedFiles must be strings", null);
+            }
+            String referencedFile = ((JsonString) value).getString();
+            boolean valid = false;
+            for (int i = 0; i < endpoints.size(); i++) {
+                if (referencedFile.startsWith(((JsonString) endpoints.get(i)).getString())) {
+                    valid = true;
+                }
+            }
+            if (!valid) {
+                throw new IllegalArgumentException(
+                        "Referenced file " + referencedFile + " is not in an allowed endpoint/path");
+            }
+            String storageIdentifier = DataAccess.getNewStorageIdentifier(driverId);
+            fileMap.add(referencedFile, storageIdentifier + "//" + referencedFile);
+        });
+        return fileMap.build();
+    }
 
-        if (result.status == 200) {
-            logger.info(" SUCCESS ====== ");
-            return true;
-        }
-        return false;
+    /**
+     * A cache of temporary permission requests - for upload (rw) and download (r)
+     * access. When a temporary permission request is created, it is added to the
+     * cache. After GLOBUS_CACHE_MAXAGE minutes, if a transfer has not been started,
+     * the permission will be revoked/deleted. (If a transfer has been started, the
+     * permission will not be revoked/deleted until the transfer is complete. This
+     * is handled in other methods.)
+     */
+    // ToDo - nominally this doesn't need to be as long as the allowed time for the
+    // downloadCache so there could be two separate settings.
+    // Single cache of open rules/permission requests
+    private final Cache<String, Long> rulesCache = Caffeine.newBuilder()
+            .expireAfterWrite(Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class), ChronoUnit.MINUTES))
+            .scheduler(Scheduler.systemScheduler()).evictionListener((ruleId, datasetId, cause) -> {
+                // Delete rules that expire
+                logger.fine("Rule " + ruleId + " expired");
+                Dataset dataset = datasetSvc.find(datasetId);
+                deletePermission((String) ruleId, dataset, logger);
+            })
+
+            .build();
+
+    // Convenience method to add a temporary permission request to the cache -
+    // allows logging of temporary permission requests
+    private void monitorTemporaryPermissions(String ruleId, long datasetId) {
+        logger.fine("Adding rule " + ruleId + " for dataset " + datasetId);
+        rulesCache.put(ruleId, datasetId);
     }
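The expiring-cache pattern used for rulesCache, shown standalone with a fixed five-minute TTL instead of the GLOBUS_CACHE_MAXAGE setting; this is a sketch of the Caffeine usage, not the production configuration.

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.Scheduler;

import java.time.Duration;

public class RulesCacheSketch {
    public static void main(String[] args) {
        // Entries that are not removed explicitly expire after the TTL, and the
        // eviction listener gets a chance to clean up (here it only logs).
        Cache<String, Long> rules = Caffeine.newBuilder()
                .expireAfterWrite(Duration.ofMinutes(5))
                .scheduler(Scheduler.systemScheduler())
                .evictionListener((ruleId, datasetId, cause) ->
                        System.out.println("Rule " + ruleId + " evicted: " + cause))
                .build();

        rules.put("rule-123", 42L);   // hypothetical rule id and dataset id
        rules.invalidate("rule-123"); // explicit removal, e.g. once a transfer completes
    }
}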
 
-    public GlobusTask getTask(AccessToken clientTokenUser, String taskId, Logger globusLogger) throws MalformedURLException {
+    /**
+     * Call the Globus API to get info about the transfer.
+     * 
+     * @param accessToken  - the Globus access token to use for the transfer API call
+     * @param taskId       - the Globus task id supplied by the user
+     * @param globusLogger - the transaction-specific logger to use (separate log
+     *                     files are created in general, some calls may use the
+     *                     class logger)
+     * @return - the GlobusTask describing the transfer, or null if the call fails
+     * @throws MalformedURLException
+     */
+    public GlobusTask getTask(String accessToken, String taskId, Logger globusLogger) throws MalformedURLException {
 
         URL url = new URL("https://transfer.api.globusonline.org/v0.10/endpoint_manager/task/" + taskId);
 
-        MakeRequestResponse result = makeRequest(url, "Bearer",
-                clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null);
+        MakeRequestResponse result = makeRequest(url, "Bearer", accessToken, "GET", null);
 
         GlobusTask task = null;
 
@@ -264,49 +328,34 @@ public GlobusTask getTask(AccessToken clientTokenUser, String taskId, Logger glo
         return task;
     }
 
-    public AccessToken getClientToken() throws MalformedURLException {
-        String globusBasicToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "");
-        URL url = new URL(
-                "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials");
-
-        MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null);
+    /**
+     * Globus call to get an access token for the service client using the
+     * long-term Basic token we hold.
+     * 
+     * @param globusBasicToken - the base64 encoded Globus Basic token comprised of
+     *                         the <Globus user id>:<key>
+     * @return - a valid Globus access token
+     */
+    public static AccessToken getClientToken(String globusBasicToken) {
+        URL url;
         AccessToken clientTokenUser = null;
-        if (result.status == 200) {
-            clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
-        }
-        return clientTokenUser;
-    }
 
-    public AccessToken getAccessToken(HttpServletRequest origRequest, String globusBasicToken)
-            throws UnsupportedEncodingException, MalformedURLException {
-        String serverName = origRequest.getServerName();
-        if (serverName.equals("localhost")) {
-            logger.severe("Changing localhost to utoronto");
-            serverName = "utl-192-123.library.utoronto.ca";
-        }
-
-        String redirectURL = "https://" + serverName + "/globus.xhtml";
-
-        redirectURL = URLEncoder.encode(redirectURL, "UTF-8");
-
-        URL url = new URL("https://auth.globus.org/v2/oauth2/token?code=" + code + "&redirect_uri=" + redirectURL
-                + "&grant_type=authorization_code");
-        logger.info(url.toString());
-
-        MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null);
-        AccessToken accessTokenUser = null;
+        try {
+            url = new URL(
+                    "https://auth.globus.org/v2/oauth2/token?scope=openid+email+profile+urn:globus:auth:scope:transfer.api.globus.org:all&grant_type=client_credentials");
 
-        if (result.status == 200) {
-            logger.info("Access Token: \n" + result.toString());
-            accessTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
-            logger.info(accessTokenUser.getAccessToken());
+            MakeRequestResponse result = makeRequest(url, "Basic", globusBasicToken, "POST", null);
+            if (result.status == 200) {
+                clientTokenUser = parseJson(result.jsonResponse, AccessToken.class, true);
+            }
+        } catch (MalformedURLException e) {
+            // Should not happen - the URL above is statically defined
+            e.printStackTrace();
         }
-
-        return accessTokenUser;
-
+        return clientTokenUser;
     }
 
-    public MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method,
+    private static MakeRequestResponse makeRequest(URL url, String authType, String authCode, String method,
             String jsonString) {
         String str = null;
         HttpURLConnection connection = null;
@@ -314,8 +363,8 @@ public MakeRequestResponse makeRequest(URL url, String authType, String authCode
         try {
             connection = (HttpURLConnection) url.openConnection();
             // Basic
-            // NThjMGYxNDQtN2QzMy00ZTYzLTk3MmUtMjljNjY5YzJjNGJiOktzSUVDMDZtTUxlRHNKTDBsTmRibXBIbjZvaWpQNGkwWVVuRmQyVDZRSnc9
             logger.info(authType + " " + authCode);
+            logger.fine("For URL: " + url.toString());
             connection.setRequestProperty("Authorization", authType + " " + authCode);
             // connection.setRequestProperty("Content-Type",
             // "application/x-www-form-urlencoded");
@@ -323,32 +372,30 @@ public MakeRequestResponse makeRequest(URL url, String authType, String authCode
             if (jsonString != null) {
                 connection.setRequestProperty("Content-Type", "application/json");
                 connection.setRequestProperty("Accept", "application/json");
-                logger.info(jsonString);
+                logger.fine(jsonString);
                 connection.setDoOutput(true);
+
                 OutputStreamWriter wr = new OutputStreamWriter(connection.getOutputStream());
                 wr.write(jsonString);
                 wr.flush();
             }
 
             status = connection.getResponseCode();
-            logger.info("Status now " + status);
+            logger.fine("Status now " + status);
             InputStream result = connection.getInputStream();
             if (result != null) {
-                logger.info("Result is not null");
                 str = readResultJson(result).toString();
-                logger.info("str is ");
-                logger.info(result.toString());
+                logger.fine("str is " + result.toString());
             } else {
-                logger.info("Result is null");
+                logger.fine("Result is null");
                 str = null;
             }
 
-            logger.info("status: " + status);
+            logger.fine("status: " + status);
         } catch (IOException ex) {
-            logger.info("IO");
             logger.severe(ex.getMessage());
-            logger.info(ex.getCause().toString());
-            logger.info(ex.getStackTrace().toString());
+            logger.fine(ex.getCause().toString());
+            logger.fine(ex.getStackTrace().toString());
         } finally {
             if (connection != null) {
                 connection.disconnect();
@@ -359,18 +406,16 @@ public MakeRequestResponse makeRequest(URL url, String authType, String authCode
 
     }
 
-    private StringBuilder readResultJson(InputStream in) {
+    private static StringBuilder readResultJson(InputStream in) {
         StringBuilder sb = null;
-        try {
-
-            BufferedReader br = new BufferedReader(new InputStreamReader(in));
+        try (BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
             sb = new StringBuilder();
             String line;
             while ((line = br.readLine()) != null) {
                 sb.append(line + "\n");
             }
             br.close();
-            logger.info(sb.toString());
+            logger.fine(sb.toString());
         } catch (IOException e) {
             sb = null;
             logger.severe(e.getMessage());
@@ -378,7 +423,7 @@ private StringBuilder readResultJson(InputStream in) {
         return sb;
     }
 
-    private <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolicy) {
+    private static <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolicy) {
         if (sb != null) {
             Gson gson = null;
             if (namingPolicy) {
@@ -395,32 +440,7 @@ private <T> T parseJson(String sb, Class<T> jsonParserClass, boolean namingPolic
         }
     }
 
-    public String getDirectory(String datasetId) {
-        Dataset dataset = null;
-        String directory = null;
-        try {
-            dataset = datasetSvc.find(Long.parseLong(datasetId));
-            if (dataset == null) {
-                logger.severe("Dataset not found " + datasetId);
-                return null;
-            }
-            String storeId = dataset.getStorageIdentifier();
-            storeId.substring(storeId.indexOf("//") + 1);
-            directory = storeId.substring(storeId.indexOf("//") + 1);
-            logger.info(storeId);
-            logger.info(directory);
-            logger.info("Storage identifier:" + dataset.getIdentifierForFileStorage());
-            return directory;
-
-        } catch (NumberFormatException nfe) {
-            logger.severe(nfe.getMessage());
-
-            return null;
-        }
-
-    }
-
-    class MakeRequestResponse {
+    static class MakeRequestResponse {
         public String jsonResponse;
         public int status;
 
@@ -431,81 +451,61 @@ class MakeRequestResponse {
 
     }
 
-    private MakeRequestResponse findDirectory(String directory, AccessToken clientTokenUser, String globusEndpoint)
-            throws MalformedURLException {
-        URL url = new URL(" https://transfer.api.globusonline.org/v0.10/endpoint/" + globusEndpoint + "/ls?path="
-                + directory + "/");
-
-        MakeRequestResponse result = makeRequest(url, "Bearer",
-                clientTokenUser.getOtherTokens().get(0).getAccessToken(), "GET", null);
-        logger.info("find directory status:" + result.status);
-
-        return result;
+    /**
+     * Cache of open download requests. This cache keeps track of the set of files
+     * selected for transfer out (download) via Globus. It is a means of
+     * transferring the list from the DatasetPage, where it is generated via user UI
+     * actions, to the Datasets/globusDownloadParameters API.
+     * 
+     * Nominally, the dataverse-globus app will call that API endpoint and then
+     * /requestGlobusDownload, at which point the cached info is sent to the app. If
+     * the app doesn't call within 5 minutes (the time allowed to call
+     * /globusDownloadParameters) plus GLOBUS_CACHE_MAXAGE minutes (a longer period
+     * giving the user time to make choices in the app), the cached info is deleted.
+     * 
+     */
+    private final Cache<String, JsonObject> downloadCache = Caffeine.newBuilder()
+            .expireAfterWrite(
+                    Duration.of(JvmSettings.GLOBUS_CACHE_MAXAGE.lookup(Integer.class) + 5, ChronoUnit.MINUTES))
+            .scheduler(Scheduler.systemScheduler()).evictionListener((downloadId, datasetId, cause) -> {
+                // Delete downloads that expire
+                logger.fine("Download for " + downloadId + " expired");
+            })
+
+            .build();
+
+    public JsonObject getFilesForDownload(String downloadId) {
+        return downloadCache.getIfPresent(downloadId);
     }
 
-    public boolean giveGlobusPublicPermissions(String datasetId)
-            throws UnsupportedEncodingException, MalformedURLException {
-
-        String globusEndpoint = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusEndpoint, "");
-        String globusBasicToken = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "");
-        if (globusEndpoint.equals("") || globusBasicToken.equals("")) {
-            return false;
-        }
-        AccessToken clientTokenUser = getClientToken();
-        if (clientTokenUser == null) {
-            logger.severe("Cannot get client token ");
-            return false;
-        }
-
-        String directory = getDirectory(datasetId);
-        logger.info(directory);
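+    /**
+     * Requests a Globus read ("r") permission on the dataset's endpoint directory
+     * for the given principal (a Globus identity).
+     */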
+    public int setPermissionForDownload(Dataset dataset, String principal) {
+        GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+        String principalType = "identity";
 
-        MakeRequestResponse status = findDirectory(directory, clientTokenUser, globusEndpoint);
-
-        if (status.status == 200) {
-
-            /*
-             * FilesList fl = parseJson(status.jsonResponse, FilesList.class, false);
-             * ArrayList<FileG> files = fl.getDATA(); if (files != null) { for (FileG file:
-             * files) { if (!file.getName().contains("cached") &&
-             * !file.getName().contains(".thumb")) { int perStatus =
-             * givePermission("all_authenticated_users", "", "r", clientTokenUser, directory
-             * + "/" + file.getName(), globusEndpoint); logger.info("givePermission status "
-             * + perStatus + " for " + file.getName()); if (perStatus == 409) {
-             * logger.info("Permissions already exist or limit was reached for " +
-             * file.getName()); } else if (perStatus == 400) {
-             * logger.info("No file in Globus " + file.getName()); } else if (perStatus !=
-             * 201) { logger.info("Cannot get permission for " + file.getName()); } } } }
-             */
-
-            int perStatus = givePermission("all_authenticated_users", "", "r", clientTokenUser, directory,
-                    globusEndpoint);
-            logger.info("givePermission status " + perStatus);
-            if (perStatus == 409) {
-                logger.info("Permissions already exist or limit was reached");
-            } else if (perStatus == 400) {
-                logger.info("No directory in Globus");
-            } else if (perStatus != 201 && perStatus != 200) {
-                logger.info("Cannot give read permission");
-                return false;
-            }
-
-        } else if (status.status == 404) {
-            logger.info("There is no globus directory");
-        } else {
-            logger.severe("Cannot find directory in globus, status " + status);
-            return false;
-        }
+        Permissions permissions = new Permissions();
+        permissions.setDATA_TYPE("access");
+        permissions.setPrincipalType(principalType);
+        permissions.setPrincipal(principal);
+        permissions.setPath(endpoint.getBasePath() + "/");
+        permissions.setPermissions("r");
 
-        return true;
+        return requestPermission(endpoint, dataset, permissions);
     }
 
-    // Generates the URL to launch the Globus app
+    // Generates the URL to launch the Globus app for upload
     public String getGlobusAppUrlForDataset(Dataset d) {
         return getGlobusAppUrlForDataset(d, true, null);
     }
 
-    public String getGlobusAppUrlForDataset(Dataset d, boolean upload, DataFile df) {
+    /**
+     * Generates the app URL for upload (in) or download (out).
+     * 
+     * @param d         - the dataset involved
+     * @param upload    - boolean, true for upload, false for download
+     * @param dataFiles - a list of the DataFiles to be downloaded
+     * @return the URL used to launch the dataverse-globus app, with a signed callback URL included as a parameter
+     */
+    public String getGlobusAppUrlForDataset(Dataset d, boolean upload, List<DataFile> dataFiles) {
         String localeCode = session.getLocaleCode();
         ApiToken apiToken = null;
         User user = session.getUser();
@@ -518,46 +518,53 @@ public String getGlobusAppUrlForDataset(Dataset d, boolean upload, DataFile df)
                 apiToken = authSvc.generateApiTokenForUser((AuthenticatedUser) user);
             }
         }
-        String storePrefix = "";
         String driverId = d.getEffectiveStorageDriverId();
-        try {
-            storePrefix = DataAccess.getDriverPrefix(driverId);
-        } catch (Exception e) {
-            logger.warning("GlobusAppUrlForDataset: Failed to get storePrefix for " + driverId);
-        }
-        //Use URLTokenUtil for params currently in common with external tools. 
-        URLTokenUtil tokenUtil = new URLTokenUtil(d, df, apiToken, localeCode);
-        String appUrl;
+
+        // Use URLTokenUtil for params currently in common with external tools.
+        URLTokenUtil tokenUtil = new URLTokenUtil(d, null, apiToken, localeCode);
+        String appUrl = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost");
+        String callback = null;
         if (upload) {
-            appUrl = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost")
-                    + "/upload?datasetPid={datasetPid}&siteUrl={siteUrl}&apiToken={apiToken}&datasetId={datasetId}&datasetVersion={datasetVersion}&dvLocale={localeCode}";
+            appUrl = appUrl + "/upload?dvLocale={localeCode}";
+            callback = SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + d.getId()
+                    + "/globusUploadParameters?locale=" + localeCode;
         } else {
-            if (df == null) {
-                appUrl = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost")
-                        + "/download?datasetPid={datasetPid}&siteUrl={siteUrl}"
-                        + ((apiToken != null) ? "&apiToken={apiToken}" : "")
-                        + "&datasetId={datasetId}&datasetVersion={datasetVersion}&dvLocale={localeCode}";
-            } else {
-                String rawStorageId = df.getStorageIdentifier();
-                rawStorageId=rawStorageId.substring(rawStorageId.lastIndexOf(":")+1);
-                appUrl = settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusAppUrl, "http://localhost")
-                        + "/download-file?datasetPid={datasetPid}&siteUrl={siteUrl}"
-                        + ((apiToken != null) ? "&apiToken={apiToken}" : "")
-                        + "&datasetId={datasetId}&datasetVersion={datasetVersion}&dvLocale={localeCode}&fileId={fileId}&storageIdentifier="
-                        + rawStorageId + "&fileName=" + df.getCurrentName();
-            }
+            // Download
+            JsonObject files = GlobusUtil.getFilesMap(dataFiles, d);
+
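+            // Cache the requested file list under a random downloadId so that the
+            // dataverse-globus app can retrieve it later via the
+            // /globusDownloadParameters callback defined below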
+            String downloadId = UUID.randomUUID().toString();
+            downloadCache.put(downloadId, files);
+            appUrl = appUrl + "/download?dvLocale={localeCode}";
+            callback = SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" + d.getId()
+                    + "/globusDownloadParameters?locale=" + localeCode + "&downloadId=" + downloadId;
+
+        }
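+        // Sign the callback URL on behalf of the requesting user (the signature is
+        // good for 5 minutes, per the javadoc on downloadCache above)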
+        if (apiToken != null) {
+            callback = UrlSignerUtil.signUrl(callback, 5, apiToken.getAuthenticatedUser().getUserIdentifier(),
+                    HttpMethod.GET,
+                    JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("") + apiToken.getTokenString());
+        } else {
+            // Shouldn't happen
+            logger.warning("Unable to get api token for user: " + user.getIdentifier());
         }
-        return tokenUtil.replaceTokensWithValues(appUrl) + "&storePrefix=" + storePrefix;
+        appUrl = appUrl + "&callback=" + Base64.getEncoder().encodeToString(StringUtils.getBytesUtf8(callback));
+
+        String finalUrl = tokenUtil.replaceTokensWithValues(appUrl);
+        logger.fine("Calling app: " + finalUrl);
+        return finalUrl;
     }
 
-    public String getGlobusDownloadScript(Dataset dataset, ApiToken apiToken) {
-        return URLTokenUtil.getScriptForUrl(getGlobusAppUrlForDataset(dataset, false, null));
-        
+    private String getGlobusDownloadScript(Dataset dataset, ApiToken apiToken, List<DataFile> downloadDFList) {
+        return URLTokenUtil.getScriptForUrl(getGlobusAppUrlForDataset(dataset, false, downloadDFList));
     }
-    
+
     @Asynchronous
     @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
-    public void globusUpload(String jsonData, ApiToken token, Dataset dataset, String httpRequestUrl,
+    public void globusUpload(JsonObject jsonData, ApiToken token, Dataset dataset, String httpRequestUrl,
             AuthenticatedUser authUser) throws ExecutionException, InterruptedException, MalformedURLException {
 
         Integer countAll = 0;
@@ -585,40 +592,34 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
             globusLogger = logger;
         }
 
-        globusLogger.info("Starting an globusUpload ");
-
-        String datasetIdentifier = dataset.getStorageIdentifier();
+        logger.fine("json: " + JsonUtil.prettyPrint(jsonData));
 
-        // ToDo - use DataAccess methods?
-        String storageType = datasetIdentifier.substring(0, datasetIdentifier.indexOf("://") + 3);
-        datasetIdentifier = datasetIdentifier.substring(datasetIdentifier.indexOf("://") + 3);
+        String taskIdentifier = jsonData.getString("taskIdentifier");
 
-        Thread.sleep(5000);
-
-        JsonObject jsonObject = null;
-        try (StringReader rdr = new StringReader(jsonData)) {
-            jsonObject = Json.createReader(rdr).readObject();
-        } catch (Exception jpe) {
-            jpe.printStackTrace();
-            logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}");
-        }
-        logger.info("json: " + JsonUtil.prettyPrint(jsonObject));
-
-        String taskIdentifier = jsonObject.getString("taskIdentifier");
-
-        String ruleId = "";
-        try {
-            ruleId = jsonObject.getString("ruleId");
-        } catch (NullPointerException npe) {
-            logger.warning("NPE for jsonData object");
+        GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+        GlobusTask task = getTask(endpoint.getClientToken(), taskIdentifier, globusLogger);
+        String ruleId = getRuleId(endpoint, task.getOwner_id(), "rw");
+        logger.fine("Found rule: " + ruleId);
+        if (ruleId != null) {
+            Long datasetId = rulesCache.getIfPresent(ruleId);
+            if (datasetId != null) {
+                // Invalidating the cache entry does not delete the rule itself;
+                // the rule is deleted below once the transfer completes
+                rulesCache.invalidate(ruleId);
+            }
         }
 
+        // Wait before first check
+        Thread.sleep(5000);
         // globus task status check
-        GlobusTask task = globusStatusCheck(taskIdentifier, globusLogger);
+        task = globusStatusCheck(endpoint, taskIdentifier, globusLogger);
         String taskStatus = getTaskStatus(task);
 
-        if (ruleId.length() > 0) {
-            deletePermision(ruleId, globusLogger);
+        globusLogger.info("Starting a globusUpload ");
+
+        if (ruleId != null) {
+            // Transfer is complete, so delete rule
+            deletePermission(ruleId, dataset, globusLogger);
+
         }
 
         // If success, switch to an EditInProgress lock - do this before removing the
@@ -660,21 +661,30 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
                 //
 
                 List<String> inputList = new ArrayList<String>();
-                JsonArray filesJsonArray = jsonObject.getJsonArray("files");
+                JsonArray filesJsonArray = jsonData.getJsonArray("files");
 
                 if (filesJsonArray != null) {
+                    String datasetIdentifier = dataset.getAuthorityForFileStorage() + "/"
+                            + dataset.getIdentifierForFileStorage();
 
                     for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) {
 
                         // storageIdentifier s3://gcs5-bucket1:1781cfeb8a7-748c270a227c from
                         // externalTool
                         String storageIdentifier = fileJsonObject.getString("storageIdentifier");
-                        String[] bits = storageIdentifier.split(":");
-                        String bucketName = bits[1].replace("/", "");
+                        String[] parts = DataAccess.getDriverIdAndStorageLocation(storageIdentifier);
+                        String storeId = parts[0];
+                        // If this is an S3 store, we need to split out the bucket name
+                        String[] bits = parts[1].split(":");
+                        String bucketName = "";
+                        if (bits.length > 1) {
+                            bucketName = bits[0];
+                        }
                         String fileId = bits[bits.length - 1];
 
                         // fullpath s3://gcs5-bucket1/10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873
-                        String fullPath = storageType + bucketName + "/" + datasetIdentifier + "/" + fileId;
+                        // or globus:///10.5072/FK2/3S6G2E/1781cfeb8a7-4ad9418a5873
+                        String fullPath = storeId + "://" + bucketName + "/" + datasetIdentifier + "/" + fileId;
                         String fileName = fileJsonObject.getString("fileName");
 
                         inputList.add(fileId + "IDsplit" + fullPath + "IDsplit" + fileName);
@@ -683,7 +693,8 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
                     // calculateMissingMetadataFields: checksum, mimetype
                     JsonObject newfilesJsonObject = calculateMissingMetadataFields(inputList, globusLogger);
                     JsonArray newfilesJsonArray = newfilesJsonObject.getJsonArray("files");
-
+                    logger.fine("Size: " + newfilesJsonArray.size());
+                    logger.fine("Val: " + JsonUtil.prettyPrint(newfilesJsonArray.getJsonObject(0)));
                     JsonArrayBuilder jsonDataSecondAPI = Json.createArrayBuilder();
 
                     for (JsonObject fileJsonObject : filesJsonArray.getValuesAs(JsonObject.class)) {
@@ -691,29 +702,33 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
                         countAll++;
                         String storageIdentifier = fileJsonObject.getString("storageIdentifier");
                         String fileName = fileJsonObject.getString("fileName");
-                        String directoryLabel = fileJsonObject.getString("directoryLabel");
-                        String[] bits = storageIdentifier.split(":");
+                        String[] parts = DataAccess.getDriverIdAndStorageLocation(storageIdentifier);
+                        // The file id is the last component of the storage location
+                        // (after the bucket name, if this is an S3 store)
+                        String[] bits = parts[1].split(":");
                         String fileId = bits[bits.length - 1];
 
                         List<JsonObject> newfileJsonObject = IntStream.range(0, newfilesJsonArray.size())
                                 .mapToObj(index -> ((JsonObject) newfilesJsonArray.get(index)).getJsonObject(fileId))
                                 .filter(Objects::nonNull).collect(Collectors.toList());
-
                         if (newfileJsonObject != null) {
-                            if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) {
-                                JsonPatch path = Json.createPatchBuilder()
-                                        .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build();
-                                fileJsonObject = path.apply(fileJsonObject);
-                                path = Json.createPatchBuilder()
-                                        .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build();
-                                fileJsonObject = path.apply(fileJsonObject);
-                                jsonDataSecondAPI.add(fileJsonObject);
-                                countSuccess++;
-                            } else {
-                                globusLogger.info(fileName
-                                        + " will be skipped from adding to dataset by second API due to missing values ");
-                                countError++;
-                            }
+                            logger.info("List Size: " + newfileJsonObject.size());
+                            // if (!newfileJsonObject.get(0).getString("hash").equalsIgnoreCase("null")) {
+                            JsonPatch path = Json.createPatchBuilder()
+                                    .add("/md5Hash", newfileJsonObject.get(0).getString("hash")).build();
+                            fileJsonObject = path.apply(fileJsonObject);
+                            path = Json.createPatchBuilder()
+                                    .add("/mimeType", newfileJsonObject.get(0).getString("mime")).build();
+                            fileJsonObject = path.apply(fileJsonObject);
+                            jsonDataSecondAPI.add(fileJsonObject);
+                            countSuccess++;
+                            // } else {
+                            // globusLogger.info(fileName
+                            // + " will be skipped from adding to dataset by second API due to missing
+                            // values ");
+                            // countError++;
+                            // }
                         } else {
                             globusLogger.info(fileName
                                     + " will be skipped from adding to dataset by second API due to missing values ");
@@ -730,6 +745,9 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
                             + datasetIdentifier + " -F jsonData='" + newjsonData + "'";
                     System.out.println("*******====command ==== " + command);
 
+                    // ToDo - refactor to call AddReplaceFileHelper.addFiles directly instead of
+                    // calling API
+
                     String output = addFilesAsync(command, globusLogger);
                     if (output.equalsIgnoreCase("ok")) {
                         // if(!taskSkippedFiles)
@@ -756,10 +774,6 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
                 globusLogger.info("Files failures: " + countError.toString());
                 globusLogger.info("Finished upload via Globus job.");
 
-                if (fileHandlerSuceeded) {
-                    fileHandler.close();
-                }
-
             } catch (Exception e) {
                 logger.info("Exception from globusUpload call ");
                 e.printStackTrace();
@@ -767,6 +781,13 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin
                 datasetSvc.removeDatasetLocks(dataset, DatasetLock.Reason.EditInProgress);
             }
         }
+        if (ruleId != null) {
+            deletePermission(ruleId, dataset, globusLogger);
+            globusLogger.info("Removed upload permission: " + ruleId);
+        }
+        if (fileHandlerSuceeded) {
+            fileHandler.close();
+        }
     }
 
     public String addFilesAsync(String curlCommand, Logger globusLogger)
@@ -808,17 +829,16 @@ private String addFiles(String curlCommand, Logger globusLogger) {
                 sb.append(line);
             globusLogger.info(" API Output :  " + sb.toString());
             JsonObject jsonObject = null;
-            try (StringReader rdr = new StringReader(sb.toString())) {
-                jsonObject = Json.createReader(rdr).readObject();
-            } catch (Exception jpe) {
-                jpe.printStackTrace();
-                globusLogger.log(Level.SEVERE, "Error parsing dataset json.");
-            }
+            jsonObject = JsonUtil.getJsonObject(sb.toString());
 
             status = jsonObject.getString("status");
         } catch (Exception ex) {
-            globusLogger.log(Level.SEVERE,
-                    "******* Unexpected Exception while executing api/datasets/:persistentId/add call ", ex);
+            if (ex instanceof JsonParsingException) {
+                globusLogger.log(Level.SEVERE, "Error parsing dataset json.");
+            } else {
+                globusLogger.log(Level.SEVERE,
+                        "******* Unexpected Exception while executing api/datasets/:persistentId/add call ", ex);
+            }
         }
 
         return status;
@@ -850,31 +870,47 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro
             globusLogger = logger;
         }
 
-        globusLogger.info("Starting an globusDownload ");
+        globusLogger.info("Starting a globusDownload ");
 
         JsonObject jsonObject = null;
-        try (StringReader rdr = new StringReader(jsonData)) {
-            jsonObject = Json.createReader(rdr).readObject();
+        try {
+            jsonObject = JsonUtil.getJsonObject(jsonData);
         } catch (Exception jpe) {
             jpe.printStackTrace();
-            globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}");
+            globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData);
+            // TODO: stop the process after this parsing exception.
         }
 
         String taskIdentifier = jsonObject.getString("taskIdentifier");
-        String ruleId = "";
 
-        try {
-            jsonObject.getString("ruleId");
-        } catch (NullPointerException npe) {
-
-        }
+        GlobusEndpoint endpoint = getGlobusEndpoint(dataset);
+        logger.info("Endpoint path: " + endpoint.getBasePath());
 
+        // If the rules_cache times out, the permission will be deleted. Presumably that
+        // doesn't affect a
         // globus task status check
-        GlobusTask task = globusStatusCheck(taskIdentifier, globusLogger);
+        GlobusTask task = getTask(endpoint.getClientToken(), taskIdentifier, globusLogger);
+        String ruleId = getRuleId(endpoint, task.getOwner_id(), "r");
+        if (ruleId != null) {
+            logger.info("Found rule: " + ruleId);
+            Long datasetId = rulesCache.getIfPresent(ruleId);
+            if (datasetId != null) {
+                logger.info("Deleting from cache: rule: " + ruleId);
+                // Invalidating the cache entry does not delete the rule itself;
+                // the rule is deleted below once the transfer is done
+                rulesCache.invalidate(ruleId);
+            }
+        } else {
+            // Something is wrong - the rule should be there (a race with the cache timing
+            // out?)
+            logger.warning("ruleId not found for taskId: " + taskIdentifier);
+        }
+        task = globusStatusCheck(endpoint, taskIdentifier, globusLogger);
         String taskStatus = getTaskStatus(task);
 
-        if (ruleId.length() > 0) {
-            deletePermision(ruleId, globusLogger);
+        // Transfer is done (success or failure) so delete the rule
+        if (ruleId != null) {
+            logger.info("Deleting: rule: " + ruleId);
+            deletePermission(ruleId, dataset, globusLogger);
         }
 
         if (taskStatus.startsWith("FAILED") || taskStatus.startsWith("INACTIVE")) {
@@ -899,18 +935,18 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro
 
     Executor executor = Executors.newFixedThreadPool(10);
 
-    private GlobusTask globusStatusCheck(String taskId, Logger globusLogger) throws MalformedURLException {
+    private GlobusTask globusStatusCheck(GlobusEndpoint endpoint, String taskId, Logger globusLogger)
+            throws MalformedURLException {
         boolean taskCompletion = false;
         String status = "";
         GlobusTask task = null;
-        int pollingInterval = SystemConfig.getIntLimitFromStringOrDefault(settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusPollingInterval), 50);
+        int pollingInterval = SystemConfig.getIntLimitFromStringOrDefault(
+                settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusPollingInterval), 50);
         do {
             try {
                 globusLogger.info("checking globus transfer task   " + taskId);
                 Thread.sleep(pollingInterval * 1000);
-                AccessToken clientTokenUser = getClientToken();
-                // success = globusServiceBean.getSuccessfulTransfers(clientTokenUser, taskId);
-                task = getTask(clientTokenUser, taskId, globusLogger);
+                task = getTask(endpoint.getClientToken(), taskId, globusLogger);
                 if (task != null) {
                     status = task.getStatus();
                     if (status != null) {
@@ -953,7 +989,7 @@ private String getTaskStatus(GlobusTask task) {
         if (task != null) {
             status = task.getStatus();
             if (status != null) {
-                // The task is in progress.
+                // The task is in progress but is not ok or queued
                 if (status.equalsIgnoreCase("ACTIVE")) {
                     status = "FAILED" + "#" + task.getNice_status() + "#" + task.getNice_status_short_description();
                 } else {
@@ -983,7 +1019,7 @@ public JsonObject calculateMissingMetadataFields(List<String> inputList, Logger
                     .collect(Collectors.toList());
         });
 
-        CompletableFuture completableFuture = allCompletableFuture.thenApply(files -> {
+        CompletableFuture<?> completableFuture = allCompletableFuture.thenApply(files -> {
             return files.stream().map(d -> json(d)).collect(toJsonArray());
         });
 
@@ -1024,7 +1060,7 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger)
         String fullPath = id.split("IDsplit")[1];
         String fileName = id.split("IDsplit")[2];
 
-        // ToDo: what if the file doesnot exists in s3
+        // ToDo: what if the file does not exist in s3
         // ToDo: what if checksum calculation failed
 
         do {
@@ -1036,8 +1072,8 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger)
             } catch (IOException ioex) {
                 count = 3;
                 logger.info(ioex.getMessage());
-                globusLogger.info("S3AccessIO: DataFile (fullPAth " + fullPath
-                        + ") does not appear to be an S3 object associated with driver: ");
+                globusLogger.info(
+                        "DataFile (fullPath " + fullPath + ") does not appear to be accessible within Dataverse: ");
             } catch (Exception ex) {
                 count = count + 1;
                 ex.printStackTrace();
@@ -1048,7 +1084,7 @@ private FileDetailsHolder calculateDetails(String id, Logger globusLogger)
         } while (count < 3);
 
         if (checksumVal.length() == 0) {
-            checksumVal = "NULL";
+            checksumVal = "Not available in Dataverse";
         }
 
         String mimeType = calculatemime(fileName);
@@ -1064,7 +1100,7 @@ public String calculatemime(String fileName) throws InterruptedException {
         String finalType = FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT;
         String type = FileUtil.determineFileTypeByNameAndExtension(fileName);
 
-        if (type!=null && !type.isBlank()) {
+        if (type != null && !type.isBlank()) {
             if (FileUtil.useRecognizedType(finalType, type)) {
                 finalType = type;
             }
@@ -1072,194 +1108,106 @@ public String calculatemime(String fileName) throws InterruptedException {
 
         return finalType;
     }
-    /*
-     * public boolean globusFinishTransfer(Dataset dataset, AuthenticatedUser user)
-     * throws MalformedURLException {
-     * 
-     * logger.info("=====Tasklist == dataset id :" + dataset.getId()); String
-     * directory = null;
-     * 
-     * try {
-     * 
-     * List<FileMetadata> fileMetadatas = new ArrayList<>();
-     * 
-     * StorageIO<Dataset> datasetSIO = DataAccess.getStorageIO(dataset);
-     * 
-     * 
-     * 
-     * DatasetVersion workingVersion = dataset.getEditVersion();
-     * 
-     * if (workingVersion.getCreateTime() != null) {
-     * workingVersion.setCreateTime(new Timestamp(new Date().getTime())); }
-     * 
-     * directory = dataset.getAuthorityForFileStorage() + "/" +
-     * dataset.getIdentifierForFileStorage();
-     * 
-     * System.out.println("======= directory ==== " + directory +
-     * " ====  datasetId :" + dataset.getId()); Map<String, Integer> checksumMapOld
-     * = new HashMap<>();
-     * 
-     * Iterator<FileMetadata> fmIt = workingVersion.getFileMetadatas().iterator();
-     * 
-     * while (fmIt.hasNext()) { FileMetadata fm = fmIt.next(); if (fm.getDataFile()
-     * != null && fm.getDataFile().getId() != null) { String chksum =
-     * fm.getDataFile().getChecksumValue(); if (chksum != null) {
-     * checksumMapOld.put(chksum, 1); } } }
-     * 
-     * List<DataFile> dFileList = new ArrayList<>(); boolean update = false; for
-     * (S3ObjectSummary s3ObjectSummary : datasetSIO.listAuxObjects("")) {
-     * 
-     * String s3ObjectKey = s3ObjectSummary.getKey();
-     * 
-     * 
-     * String t = s3ObjectKey.replace(directory, "");
-     * 
-     * if (t.indexOf(".") > 0) { long totalSize = s3ObjectSummary.getSize(); String
-     * filePath = s3ObjectKey; String fileName =
-     * filePath.split("/")[filePath.split("/").length - 1]; String fullPath =
-     * datasetSIO.getStorageLocation() + "/" + fileName;
-     * 
-     * logger.info("Full path " + fullPath); StorageIO<DvObject> dataFileStorageIO =
-     * DataAccess.getDirectStorageIO(fullPath); InputStream in =
-     * dataFileStorageIO.getInputStream();
-     * 
-     * String checksumVal = FileUtil.calculateChecksum(in,
-     * DataFile.ChecksumType.MD5); //String checksumVal = s3ObjectSummary.getETag();
-     * logger.info("The checksum is " + checksumVal); if
-     * ((checksumMapOld.get(checksumVal) != null)) { logger.info("datasetId :" +
-     * dataset.getId() + "======= filename ==== " + filePath +
-     * " == file already exists "); } else if (filePath.contains("cached") ||
-     * filePath.contains(".thumb")) { logger.info(filePath + " is ignored"); } else
-     * { update = true; logger.info("datasetId :" + dataset.getId() +
-     * "======= filename ==== " + filePath + " == new file   "); try {
-     * 
-     * DataFile datafile = new DataFile(DataFileServiceBean.MIME_TYPE_GLOBUS_FILE);
-     * //MIME_TYPE_GLOBUS datafile.setModificationTime(new Timestamp(new
-     * Date().getTime())); datafile.setCreateDate(new Timestamp(new
-     * Date().getTime())); datafile.setPermissionModificationTime(new Timestamp(new
-     * Date().getTime()));
-     * 
-     * FileMetadata fmd = new FileMetadata();
-     * 
-     * 
-     * fmd.setLabel(fileName); fmd.setDirectoryLabel(filePath.replace(directory,
-     * "").replace(File.separator + fileName, ""));
-     * 
-     * fmd.setDataFile(datafile);
-     * 
-     * datafile.getFileMetadatas().add(fmd);
-     * 
-     * FileUtil.generateS3PackageStorageIdentifierForGlobus(datafile);
-     * logger.info("====  datasetId :" + dataset.getId() + "======= filename ==== "
-     * + filePath + " == added to datafile, filemetadata   ");
-     * 
-     * try { // We persist "SHA1" rather than "SHA-1".
-     * //datafile.setChecksumType(DataFile.ChecksumType.SHA1);
-     * datafile.setChecksumType(DataFile.ChecksumType.MD5);
-     * datafile.setChecksumValue(checksumVal); } catch (Exception cksumEx) {
-     * logger.info("====  datasetId :" + dataset.getId() +
-     * "======Could not calculate  checksumType signature for the new file "); }
-     * 
-     * datafile.setFilesize(totalSize);
-     * 
-     * dFileList.add(datafile);
-     * 
-     * } catch (Exception ioex) { logger.info("datasetId :" + dataset.getId() +
-     * "======Failed to process and/or save the file " + ioex.getMessage()); return
-     * false;
-     * 
-     * } } } } if (update) {
-     * 
-     * List<DataFile> filesAdded = new ArrayList<>();
-     * 
-     * if (dFileList != null && dFileList.size() > 0) {
-     * 
-     * // Dataset dataset = version.getDataset();
-     * 
-     * for (DataFile dataFile : dFileList) {
-     * 
-     * if (dataFile.getOwner() == null) { dataFile.setOwner(dataset);
-     * 
-     * workingVersion.getFileMetadatas().add(dataFile.getFileMetadata());
-     * dataFile.getFileMetadata().setDatasetVersion(workingVersion);
-     * dataset.getFiles().add(dataFile);
-     * 
-     * }
-     * 
-     * filesAdded.add(dataFile);
-     * 
-     * }
-     * 
-     * logger.info("====  datasetId :" + dataset.getId() +
-     * " ===== Done! Finished saving new files to the dataset."); }
-     * 
-     * fileMetadatas.clear(); for (DataFile addedFile : filesAdded) {
-     * fileMetadatas.add(addedFile.getFileMetadata()); } filesAdded = null;
-     * 
-     * if (workingVersion.isDraft()) {
-     * 
-     * logger.info("Async: ====  datasetId :" + dataset.getId() +
-     * " ==== inside draft version ");
-     * 
-     * Timestamp updateTime = new Timestamp(new Date().getTime());
-     * 
-     * workingVersion.setLastUpdateTime(updateTime);
-     * dataset.setModificationTime(updateTime);
-     * 
-     * 
-     * for (FileMetadata fileMetadata : fileMetadatas) {
-     * 
-     * if (fileMetadata.getDataFile().getCreateDate() == null) {
-     * fileMetadata.getDataFile().setCreateDate(updateTime);
-     * fileMetadata.getDataFile().setCreator((AuthenticatedUser) user); }
-     * fileMetadata.getDataFile().setModificationTime(updateTime); }
-     * 
-     * 
-     * } else { logger.info("datasetId :" + dataset.getId() +
-     * " ==== inside released version ");
-     * 
-     * for (int i = 0; i < workingVersion.getFileMetadatas().size(); i++) { for
-     * (FileMetadata fileMetadata : fileMetadatas) { if
-     * (fileMetadata.getDataFile().getStorageIdentifier() != null) {
-     * 
-     * if (fileMetadata.getDataFile().getStorageIdentifier().equals(workingVersion.
-     * getFileMetadatas().get(i).getDataFile().getStorageIdentifier())) {
-     * workingVersion.getFileMetadatas().set(i, fileMetadata); } } } }
-     * 
-     * 
-     * }
-     * 
-     * 
-     * try { Command<Dataset> cmd; logger.info("Async: ====  datasetId :" +
-     * dataset.getId() +
-     * " ======= UpdateDatasetVersionCommand START in globus function "); cmd = new
-     * UpdateDatasetVersionCommand(dataset, new DataverseRequest(user,
-     * (HttpServletRequest) null)); ((UpdateDatasetVersionCommand)
-     * cmd).setValidateLenient(true); //new DataverseRequest(authenticatedUser,
-     * (HttpServletRequest) null) //dvRequestService.getDataverseRequest()
-     * commandEngine.submit(cmd); } catch (CommandException ex) {
-     * logger.log(Level.WARNING, "====  datasetId :" + dataset.getId() +
-     * "======CommandException updating DatasetVersion from batch job: " +
-     * ex.getMessage()); return false; }
-     * 
-     * logger.info("====  datasetId :" + dataset.getId() +
-     * " ======= GLOBUS  CALL COMPLETED SUCCESSFULLY ");
-     * 
-     * //return true; }
-     * 
-     * } catch (Exception e) { String message = e.getMessage();
-     * 
-     * logger.info("====  datasetId :" + dataset.getId() +
-     * " ======= GLOBUS  CALL Exception ============== " + message);
-     * e.printStackTrace(); return false; //return
-     * error(Response.Status.INTERNAL_SERVER_ERROR,
-     * "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '"
-     * + message + "'."); }
-     * 
-     * String globusBasicToken =
-     * settingsSvc.getValueForKey(SettingsServiceBean.Key.GlobusBasicToken, "");
-     * AccessToken clientTokenUser = getClientToken(globusBasicToken);
-     * updatePermision(clientTokenUser, directory, "identity", "r"); return true; }
-     * 
-     */
+
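+    /**
+     * Looks up the Globus transfer endpoint id, a client access token, and the
+     * directory path for the store backing the given Dataset or DataFile.
+     */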
+    private GlobusEndpoint getGlobusEndpoint(DvObject dvObject) {
+        Dataset dataset = null;
+        if (dvObject instanceof Dataset) {
+            dataset = (Dataset) dvObject;
+        } else if (dvObject instanceof DataFile) {
+            dataset = (Dataset) dvObject.getOwner();
+        } else {
+            throw new IllegalArgumentException("Unsupported DvObject type: " + dvObject.getClass().getName());
+        }
+        String driverId = dataset.getEffectiveStorageDriverId();
+        GlobusEndpoint endpoint = null;
+
+        String directoryPath = GlobusAccessibleStore.getTransferPath(driverId);
+
+        if (GlobusAccessibleStore.isDataverseManaged(driverId) && (dataset != null)) {
+            directoryPath = directoryPath + "/" + dataset.getAuthorityForFileStorage() + "/"
+                    + dataset.getIdentifierForFileStorage();
+        } else {
+            // remote store - may have path in file storageidentifier
+            String relPath = dvObject.getStorageIdentifier()
+                    .substring(dvObject.getStorageIdentifier().lastIndexOf("//") + 2);
+            int filenameStart = relPath.lastIndexOf("/") + 1;
+            if (filenameStart > 0) {
+                directoryPath = directoryPath + relPath.substring(0, filenameStart);
+            }
+        }
+        logger.fine("directoryPath finally: " + directoryPath);
+
+        String endpointId = GlobusAccessibleStore.getTransferEndpointId(driverId);
+
+        logger.fine("endpointId: " + endpointId);
+
+        String globusToken = GlobusAccessibleStore.getGlobusToken(driverId);
+
+        AccessToken accessToken = GlobusServiceBean.getClientToken(globusToken);
+        String clientToken = accessToken.getOtherTokens().get(0).getAccessToken();
+        endpoint = new GlobusEndpoint(endpointId, clientToken, directoryPath);
+
+        return endpoint;
+    }
+
+    // This helper method is called from the Download terms/guestbook/etc. popup,
+    // when the user clicks the "ok" button. We use it, instead of calling
+    // downloadServiceBean directly, in order to differentiate between single
+    // file downloads and multiple (batch) downloads - since both use the same
+    // terms/etc. popup.
+    public void writeGuestbookAndStartTransfer(GuestbookResponse guestbookResponse,
+            boolean doNotSaveGuestbookResponse) {
+        PrimeFaces.current().executeScript("PF('guestbookAndTermsPopup').hide()");
+        guestbookResponse.setEventType(GuestbookResponse.DOWNLOAD);
+
+        ApiToken apiToken = null;
+        User user = session.getUser();
+        if (user instanceof AuthenticatedUser) {
+            apiToken = authSvc.findApiTokenByUser((AuthenticatedUser) user);
+        } else if (user instanceof PrivateUrlUser) {
+            PrivateUrlUser privateUrlUser = (PrivateUrlUser) user;
+            PrivateUrl privUrl = privateUrlService.getPrivateUrlFromDatasetId(privateUrlUser.getDatasetId());
+            apiToken = new ApiToken();
+            apiToken.setTokenString(privUrl.getToken());
+        }
+
+        DataFile df = guestbookResponse.getDataFile();
+        if (df != null) {
+            logger.fine("Single datafile case for writeGuestbookAndStartTransfer");
+            List<DataFile> downloadDFList = new ArrayList<DataFile>(1);
+            downloadDFList.add(df);
+            if (!doNotSaveGuestbookResponse) {
+                fileDownloadService.writeGuestbookResponseRecord(guestbookResponse);
+            }
+            PrimeFaces.current().executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, downloadDFList));
+        } else {
+            // Following FileDownloadServiceBean writeGuestbookAndStartBatchDownload
+            List<String> list = new ArrayList<>(Arrays.asList(guestbookResponse.getSelectedFileIds().split(",")));
+            List<DataFile> selectedFiles = new ArrayList<DataFile>();
+            for (String idAsString : list) {
+                try {
+                    Long fileId = Long.parseLong(idAsString);
+                    // Look up the DataFile object for this file; a GuestBookResponse
+                    // record is only written when requested:
+                    df = dataFileService.findCheapAndEasy(fileId);
+                    if (!doNotSaveGuestbookResponse) {
+                        guestbookResponse.setDataFile(df);
+                        fileDownloadService.writeGuestbookResponseRecord(guestbookResponse);
+                    }
+                    selectedFiles.add(df);
+                } catch (NumberFormatException nfe) {
+                    logger.warning(
+                            "A file id passed to the writeGuestbookAndStartTransfer method as a string could not be converted back to Long: "
+                                    + idAsString);
+                    return;
+                }
+
+            }
+            if (!selectedFiles.isEmpty()) {
+                // Use dataset from one file - files should all be from the same dataset
+                PrimeFaces.current().executeScript(getGlobusDownloadScript(df.getOwner(), apiToken, selectedFiles));
+            }
+        }
+    }
+
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java
new file mode 100644
index 00000000000..92cf8ac7704
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusUtil.java
@@ -0,0 +1,33 @@
+package edu.harvard.iq.dataverse.globus;
+
+import java.util.List;
+
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+
+public class GlobusUtil {
+
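+    /**
+     * Builds a JsonObject mapping each DataFile id to its Globus location: the
+     * transfer endpoint/path for Dataverse-managed stores, or the path from the
+     * file's storage identifier for remote stores.
+     */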
+    public static JsonObject getFilesMap(List<DataFile> dataFiles, Dataset d) {
+        JsonObjectBuilder filesBuilder = Json.createObjectBuilder();
+        for (DataFile df : dataFiles) {
+            String storageId = df.getStorageIdentifier();
+            String[] parts = DataAccess
+                    .getDriverIdAndStorageLocation(DataAccess.getLocationFromStorageId(storageId, d));
+            String driverId = parts[0];
+            String fileLocation = parts[1];
+            if (GlobusAccessibleStore.isDataverseManaged(driverId)) {
+                String endpointWithBasePath = GlobusAccessibleStore.getTransferEnpointWithPath(driverId);
+                fileLocation = endpointWithBasePath + "/" + fileLocation;
+            } else {
+                fileLocation = storageId.substring(storageId.lastIndexOf("//") + 2);
+            }
+            filesBuilder.add(df.getId().toString(), fileLocation);
+        }
+        return filesBuilder.build();
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java
index 50d06807a13..ba6f5c3dec2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java
@@ -7,14 +7,14 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
index 40bd45ecb30..20884e3360c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java
@@ -21,13 +21,13 @@
 import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.Resource;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.Timer;
-import javax.inject.Named;
+import jakarta.annotation.Resource;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.Timer;
+import jakarta.inject.Named;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.TransformerException;
 import org.apache.commons.lang3.mutable.MutableBoolean;
@@ -51,8 +51,8 @@
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.Path;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
@@ -69,7 +69,7 @@ public class HarvesterServiceBean {
     @EJB
     DatasetServiceBean datasetService;
     @Resource
-    javax.ejb.TimerService timerService;
+    jakarta.ejb.TimerService timerService;
     @EJB
     DataverseTimerServiceBean dataverseTimerService;
     @EJB
@@ -148,12 +148,12 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId
                 
         String logTimestamp = logFormatter.format(new Date());
         Logger hdLogger = Logger.getLogger("edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean." + harvestingClientConfig.getName() + logTimestamp);
-        String logFileName = "../logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log";
+        String logFileName = System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs" + File.separator + "harvest_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".log";
         FileHandler fileHandler = new FileHandler(logFileName);
         hdLogger.setUseParentHandlers(false);
         hdLogger.addHandler(fileHandler);
         
-        PrintWriter importCleanupLog = new PrintWriter(new FileWriter( "../logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp+".txt"));
+        PrintWriter importCleanupLog = new PrintWriter(new FileWriter(System.getProperty("com.sun.aas.instanceRoot") + File.separator + "logs/harvest_cleanup_" + harvestingClientConfig.getName() + "_" + logTimestamp + ".txt"));
         
         
         List<Long> harvestedDatasetIds = new ArrayList<>();
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java
index d27ddc41b7f..40db55f2a0c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java
@@ -17,25 +17,25 @@
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OneToOne;
-import javax.persistence.OrderBy;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.OrderBy;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
 import org.hibernate.validator.constraints.NotBlank;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
index 13cc44ce919..7ec6d75a41c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
@@ -5,25 +5,23 @@
 import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
-import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import edu.harvard.iq.dataverse.engine.command.impl.DeleteHarvestingClientCommand;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java
index 49e40e786ea..94753d8594d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecord.java
@@ -21,12 +21,12 @@
 
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java
index 5a8f2f41d31..1b4a7bc7db0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAIRecordServiceBean.java
@@ -8,32 +8,27 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.time.Instant;
-import java.io.File;
-import java.io.IOException;
-import java.sql.Timestamp;
-import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-import javax.persistence.TemporalType;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.TemporalType;
 
 /**
  *
@@ -113,7 +108,7 @@ public void updateOaiRecords(String setName, List<Long> datasetIds, Date updateT
                                 && (dataset.getLastExportTime() == null
                                 || dataset.getLastExportTime().before(publicationDate))) {
 
-                            setUpdateLogger.fine("Attempting to run export on dataset " + dataset.getGlobalIdString());
+                            setUpdateLogger.fine("Attempting to run export on dataset " + dataset.getGlobalId().asString());
                             exportAllFormats(dataset);
                         }
                         
@@ -147,14 +142,14 @@ public void updateOaiRecordForDataset(Dataset dataset, String setName, Map<Strin
         boolean isReleased = dataset.getReleasedVersion() != null;
         
         if (isReleased && dataset.getLastExportTime() != null) {
-            OAIRecord record = recordMap.get(dataset.getGlobalIdString());
+            OAIRecord record = recordMap.get(dataset.getGlobalId().asString());
             if (record == null) {
-                setUpdateLogger.info("creating a new OAI Record for " + dataset.getGlobalIdString());
-                record = new OAIRecord(setName, dataset.getGlobalIdString(), new Date());
+                setUpdateLogger.info("creating a new OAI Record for " + dataset.getGlobalId().asString());
+                record = new OAIRecord(setName, dataset.getGlobalId().asString(), new Date());
                 em.persist(record);
             } else {
                 if (record.isRemoved()) {
-                    setUpdateLogger.info("\"un-deleting\" an existing OAI Record for " + dataset.getGlobalIdString());
+                    setUpdateLogger.info("\"un-deleting\" an existing OAI Record for " + dataset.getGlobalId().asString());
                     record.setRemoved(false);
                     record.setLastUpdateTime(new Date());
                 } else if (dataset.getLastExportTime().after(record.getLastUpdateTime())) {
@@ -181,7 +176,7 @@ record = new OAIRecord(setName, dataset.getGlobalIdString(), new Date());
     public void updateOaiRecordsForDataset(Dataset dataset) {
         // create Map of OaiRecords
 
-        List<OAIRecord> oaiRecords = findOaiRecordsByGlobalId(dataset.getGlobalIdString());
+        List<OAIRecord> oaiRecords = findOaiRecordsByGlobalId(dataset.getGlobalId().asString());
         if (oaiRecords != null) {
 
             DatasetVersion releasedVersion = dataset.getReleasedVersion();
@@ -195,7 +190,7 @@ public void updateOaiRecordsForDataset(Dataset dataset) {
             
             for (OAIRecord record : oaiRecords) {
                 if (record.isRemoved()) {
-                    logger.fine("\"un-deleting\" an existing OAI Record for " + dataset.getGlobalIdString());
+                    logger.fine("\"un-deleting\" an existing OAI Record for " + dataset.getGlobalId().asString());
                     record.setRemoved(false);
                     record.setLastUpdateTime(new Date());
                 } else if (dataset.getLastExportTime().after(record.getLastUpdateTime())) {
@@ -245,7 +240,7 @@ public void exportAllFormatsInNewTransaction(Dataset dataset) throws ExportExcep
             exportServiceInstance.exportAllFormats(dataset);
             dataset = datasetService.merge(dataset);
         } catch (Exception e) {
-            logger.fine("Caught unknown exception while trying to export");
+            logger.log(Level.FINE, "Caught unknown exception while trying to export", e);
             throw new ExportException(e.getMessage());
         }
     }
@@ -265,7 +260,7 @@ public OAIRecord findOAIRecordBySetNameandGlobalId(String setName, String global
         
         try {
            oaiRecord = (OAIRecord) query.setMaxResults(1).getSingleResult();
-        } catch (javax.persistence.NoResultException e) {
+        } catch (jakarta.persistence.NoResultException e) {
            // Do nothing, just return null. 
         }
         logger.fine("returning oai record.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java
index 038bb66de32..8d6b04effef 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISet.java
@@ -20,16 +20,16 @@
 package edu.harvard.iq.dataverse.harvest.server;
 
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.Version;
-import javax.validation.constraints.Pattern;
-import javax.validation.constraints.Size;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Version;
+import jakarta.validation.constraints.Pattern;
+import jakarta.validation.constraints.Size;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java
index 6b28c8808a0..2bd666401c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/OAISetServiceBean.java
@@ -17,19 +17,17 @@
 import java.util.logging.FileHandler;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import org.apache.solr.client.solrj.SolrClient;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java
index 8840d433ae1..96a19acc0e8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/web/servlet/OAIServlet.java
@@ -20,9 +20,10 @@
 import io.gdcc.xoai.xml.XmlWriter;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
-import edu.harvard.iq.dataverse.export.spi.Exporter;
+import io.gdcc.spi.export.ExportException;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.OAISetServiceBean;
 import edu.harvard.iq.dataverse.harvest.server.xoai.DataverseXoaiItemRepository;
@@ -37,17 +38,16 @@
 
 
 import java.io.IOException;
-import java.time.Instant;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.inject.Inject;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
 import org.eclipse.microprofile.config.inject.ConfigProperty;
-import javax.mail.internet.InternetAddress;
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
+import jakarta.mail.internet.InternetAddress;
+import jakarta.servlet.ServletConfig;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.http.HttpServlet;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletResponse;
 import javax.xml.stream.XMLStreamException;
 import org.eclipse.microprofile.config.Config;
 import org.eclipse.microprofile.config.ConfigProvider;
@@ -154,18 +154,13 @@ private void addSupportedMetadataFormats(Context context) {
                 exporter = null;
             }
 
-            if (exporter != null && exporter.isXMLFormat() && exporter.isHarvestable()) {
+            if (exporter != null && (exporter instanceof XMLExporter) && exporter.isHarvestable()) {
                 MetadataFormat metadataFormat;
 
-                try {
+                metadataFormat = MetadataFormat.metadataFormat(formatName);
+                metadataFormat.withNamespace(((XMLExporter) exporter).getXMLNameSpace());
+                metadataFormat.withSchemaLocation(((XMLExporter) exporter).getXMLSchemaLocation());
 
-                    metadataFormat = MetadataFormat.metadataFormat(formatName);
-                    metadataFormat.withNamespace(exporter.getXMLNameSpace());
-                    metadataFormat.withSchemaLocation(exporter.getXMLSchemaLocation());
-                    
-                } catch (ExportException ex) {
-                    metadataFormat = null;
-                }
                 if (metadataFormat != null) {
                     context.withMetadataFormat(metadataFormat);
                 }
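[Editor's note, not part of the patch: a minimal sketch of how the new export SPI is consulted when registering a harvestable metadata format, mirroring the calls shown in the hunk above; formatName, exporter, and context stand in for the servlet's local variables.]

    // Sketch, assuming exporter was looked up from ExportService as above and may be null.
    if (exporter instanceof XMLExporter && exporter.isHarvestable()) {
        XMLExporter xmlExporter = (XMLExporter) exporter;
        MetadataFormat metadataFormat = MetadataFormat.metadataFormat(formatName);
        metadataFormat.withNamespace(xmlExporter.getXMLNameSpace());
        metadataFormat.withSchemaLocation(xmlExporter.getXMLSchemaLocation());
        context.withMetadataFormat(metadataFormat);
    }
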
diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java b/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java
index 147d42648fa..2a659bb05e2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java
+++ b/src/main/java/edu/harvard/iq/dataverse/harvest/server/xoai/DataverseXoaiItemRepository.java
@@ -9,8 +9,8 @@
 import io.gdcc.xoai.dataprovider.repository.ItemRepository;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
-import edu.harvard.iq.dataverse.export.ExportException;
 import edu.harvard.iq.dataverse.export.ExportService;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecord;
 import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
 import edu.harvard.iq.dataverse.util.StringUtil;
@@ -215,7 +215,7 @@ private DataverseXoaiItem addMetadata(DataverseXoaiItem xoaiItem, MetadataFormat
                 try {
                     Metadata metadata = getDatasetMetadata(dataset, metadataFormat.getPrefix());
                     xoaiItem.withDataset(dataset).withMetadata(metadata);
-                } catch (ExportException | IOException ex) {
+                } catch (IOException ex) {
                     // This is not supposed to happen in normal operations; 
                     // since by design only the datasets for which the metadata
                     // records have been pre-generated ("exported") should be 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessage.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessage.java
index e9923012fad..b1c93e52ebd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessage.java
@@ -19,7 +19,6 @@
 */
 package edu.harvard.iq.dataverse.ingest;
 
-import edu.harvard.iq.dataverse.DataFile;
 import java.io.Serializable;
 import java.util.List;
 import java.util.ArrayList;
@@ -32,49 +31,21 @@
  * @author Leonid Andreev
  */
 public class IngestMessage implements Serializable {
-    public static final int INGEST_MESAGE_LEVEL_ERROR = 1; 
-    public static final int INGEST_MESAGE_LEVEL_INFO = 2;
-
     /** Creates a new instance of IngestMessage */
-    public IngestMessage()  {
-        this(INGEST_MESAGE_LEVEL_INFO);
-    }
 
-    public IngestMessage(int messageLevel)  {
-        this.messageLevel = messageLevel;
+    public IngestMessage()  {
         datafile_ids = new ArrayList<Long>();
     }
 
-    public IngestMessage(int messageLevel, Long authenticatedUserId) {
-        this.messageLevel = messageLevel;
+    public IngestMessage(Long authenticatedUserId) {
         this.authenticatedUserId = authenticatedUserId;
         datafile_ids = new ArrayList<Long>();
     }
-
-    private int messageLevel = INGEST_MESAGE_LEVEL_INFO;
     
     private Long datasetId;
-    private Long datasetVersionId;
-    private String versionNote;
-    private String datasetVersionNumber;
     private List<Long> datafile_ids;
     private Long authenticatedUserId;
-
-    public String getVersionNote() {
-        return versionNote;
-    }
-
-    public void setVersionNote(String versionNote) {
-        this.versionNote = versionNote;
-    }
-
-    public int getMessageLevel() {
-        return messageLevel;
-    }
-
-    public void setMessageLevel(int messageLevel) {
-        this.messageLevel = messageLevel;
-    }
+    private String info;
 
     public Long getDatasetId() {
         return datasetId;
@@ -83,30 +54,6 @@ public Long getDatasetId() {
     public void setDatasetId(Long datasetId) {
         this.datasetId = datasetId;
     }
-
-    public Long getDatasetVersionId() {
-        return datasetVersionId;
-    }
-
-    public void setDatasetVersionId(Long datasetVersionId) {
-        this.datasetVersionId = datasetVersionId;
-    }
-
-    public boolean sendInfoMessage() {
-        return messageLevel >= INGEST_MESAGE_LEVEL_INFO;
-    }
-
-    public boolean sendErrorMessage() {
-        return messageLevel >= INGEST_MESAGE_LEVEL_ERROR;
-    }
-
-    public String getDatasetVersionNumber() {
-        return datasetVersionNumber;
-    }
-
-    public void setDatasetVersionNumber(String datasetVersionNumber) {
-        this.datasetVersionNumber = datasetVersionNumber;
-    }
     
     public List<Long> getFileIds() {
         return datafile_ids; 
@@ -123,4 +70,12 @@ public void addFileId(Long file_id) {
     public Long getAuthenticatedUserId() {
         return authenticatedUserId;
     }
+
+    public void setInfo(String info) {
+        this.info = info;
+    }
+
+    public String getInfo() {
+        return info;
+    }
 }
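[Editor's note, not part of the patch: with the message-level fields gone, a caller now builds an IngestMessage from the requesting user's id, the dataset id, the datafile ids, and an optional info string (the info string is what IngestMessageBean passes along when re-taking the Ingest lock). A hedged usage sketch; only the constructor and setters shown in the class above are used, while authenticatedUser, dataset, and filesToIngest are hypothetical surrounding variables.]

    IngestMessage ingestMessage = new IngestMessage(authenticatedUser.getId());
    ingestMessage.setDatasetId(dataset.getId());
    ingestMessage.setInfo("Ingest of " + filesToIngest.size() + " tabular file(s)");
    for (DataFile dataFile : filesToIngest) {
        ingestMessage.addFileId(dataFile.getId());
    }
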
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
index b029c0c97c5..f56fe608a52 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestMessageBean.java
@@ -29,16 +29,15 @@
 import java.time.Instant;
 import java.util.Iterator;
 import java.util.logging.Logger;
-import javax.ejb.ActivationConfigProperty;
-import javax.ejb.EJB;
-import javax.ejb.MessageDriven;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.jms.JMSException;
-import javax.jms.Message;
-import javax.jms.MessageListener;
-import javax.jms.ObjectMessage;
-
+import jakarta.ejb.ActivationConfigProperty;
+import jakarta.ejb.EJB;
+import jakarta.ejb.MessageDriven;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.jms.JMSException;
+import jakarta.jms.Message;
+import jakarta.jms.MessageListener;
+import jakarta.jms.ObjectMessage;
 /**
  *
  * This is an experimental, JMS-based implementation of asynchronous 
@@ -50,9 +49,10 @@
     mappedName = "java:app/jms/queue/ingest",
     activationConfig =  {
         @ActivationConfigProperty(propertyName = "acknowledgeMode", propertyValue = "Auto-acknowledge"),
-        @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue")
+        @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "jakarta.jms.Queue")
     }
 )
+
 public class IngestMessageBean implements MessageListener {
     private static final Logger logger = Logger.getLogger(IngestMessageBean.class.getCanonicalName());
     @EJB DatasetServiceBean datasetService;
@@ -69,17 +69,27 @@ public IngestMessageBean() {
     public void onMessage(Message message) {
         IngestMessage ingestMessage = null;
 
-        Long datafile_id = null;
         AuthenticatedUser authenticatedUser = null;
         
         try {
             ObjectMessage om = (ObjectMessage) message;
             ingestMessage = (IngestMessage) om.getObject();
 
+            // If the lock was removed while an ingest was queued, retake the lock.
+            // The "if" check is the first thing the addDatasetLock method does;
+            // it has some complexity, and repeating it here would duplicate code.
+            // If that check were ever removed from addDatasetLock without updating
+            // the code that calls it, the ingest code would still not break, because
+            // we remove all ingest locks at the end (right now, there can be at most one ingest lock).
+            datasetService.addDatasetLock(ingestMessage.getDatasetId(),
+                    DatasetLock.Reason.Ingest,
+                    ingestMessage.getAuthenticatedUserId(),
+                    ingestMessage.getInfo());
+
             authenticatedUser = authenticationServiceBean.findByID(ingestMessage.getAuthenticatedUserId());
 
-            Iterator iter = ingestMessage.getFileIds().iterator();
-            datafile_id = null;
+            Iterator<Long> iter = ingestMessage.getFileIds().iterator();
+            Long datafile_id = null;
 
             boolean ingestWithErrors = false;
 
@@ -87,7 +97,7 @@ public void onMessage(Message message) {
             sbIngestedFiles.append("<ul>");
             
             while (iter.hasNext()) {
-                datafile_id = (Long) iter.next();
+                datafile_id = iter.next();
 
                 logger.fine("Start ingest job;");
                 try {
@@ -128,9 +138,9 @@ public void onMessage(Message message) {
                             IngestReport errorReport = new IngestReport();
                             errorReport.setFailure();
                             if (ex.getMessage() != null) {
-                                errorReport.setReport("Ingest succeeded, but failed to save the ingested tabular data in the database: " + ex.getMessage());
+                                errorReport.setReport(BundleUtil.getStringFromBundle("file.ingest.saveFailed.detail.message") + ex.getMessage());
                             } else {
-                                errorReport.setReport("Ingest succeeded, but failed to save the ingested tabular data in the database; no further information is available");
+                                errorReport.setReport(BundleUtil.getStringFromBundle("file.ingest.saveFailed.message"));
                             }
                             errorReport.setDataFile(datafile);
                             datafile.setIngestReport(errorReport);
@@ -139,11 +149,10 @@ public void onMessage(Message message) {
                             logger.info("trying to save datafile and the failed ingest report, id=" + datafile_id);
                             datafile = datafileService.save(datafile);
 
-                            Dataset dataset = datafile.getOwner();
-                            if (dataset != null && dataset.getId() != null) {
+                            if (ingestMessage.getDatasetId() != null) {
                                 //logger.info("attempting to remove dataset lock for dataset " + dataset.getId());
                                 //datasetService.removeDatasetLock(dataset.getId());
-                                ingestService.sendFailNotification(dataset.getId());
+                                ingestService.sendFailNotification(ingestMessage.getDatasetId());
                             }
                         }
                     }
@@ -152,27 +161,11 @@ public void onMessage(Message message) {
 
             sbIngestedFiles.append("</ul>");
 
-            Long objectId = null;
-            
-            // Remove the dataset lock: 
-            // (note that the assumption here is that all of the datafiles
-            // packed into this IngestMessage belong to the same dataset) 
-            if (datafile_id != null) {
-                DataFile datafile = datafileService.find(datafile_id);
-                if (datafile != null) {
-                    Dataset dataset = datafile.getOwner();
-                    objectId = dataset.getId();
-                    if (dataset != null && dataset.getId() != null) {
-                        datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.Ingest);
-                    }
-                } 
-            }
-
             userNotificationService.sendNotification(
                     authenticatedUser,
                     Timestamp.from(Instant.now()),
                     !ingestWithErrors ? UserNotification.Type.INGESTCOMPLETED : UserNotification.Type.INGESTCOMPLETEDWITHERRORS,
-                    objectId,
+                    ingestMessage.getDatasetId(),
                     sbIngestedFiles.toString(),
                     true
             );
@@ -182,9 +175,15 @@ public void onMessage(Message message) {
             ex.printStackTrace(); // error in getting object from message; can't send e-mail
 
         } finally {
-            // when we're done, go ahead and remove the lock (not yet)
+            // when we're done, go ahead and remove the lock
             try {
-                //datasetService.removeDatasetLock( ingestMessage.getDatasetId() );
+                // Remove the dataset lock: 
+                // (note that the assumption here is that all of the datafiles
+                // packed into this IngestMessage belong to the same dataset) 
+                Dataset dataset = datasetService.find(ingestMessage.getDatasetId());
+                if (dataset != null && dataset.getId() != null) {
+                    datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.Ingest);
+                }
             } catch (Exception ex) {
                 ex.printStackTrace(); // application was unable to remove the datasetLock
             }
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java
index 1ba63207208..0fed25e5c88 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestQueueProducer.java
@@ -1,19 +1,19 @@
 package edu.harvard.iq.dataverse.ingest;
 
-import javax.annotation.Resource;
+import jakarta.annotation.Resource;
 // https://www.baeldung.com/jee-cdi-vs-ejb-singleton
-import javax.inject.Singleton;
-import javax.enterprise.inject.Produces;
-import javax.jms.JMSConnectionFactoryDefinition;
-import javax.jms.JMSDestinationDefinition;
-import javax.jms.Queue;
-import javax.jms.QueueConnectionFactory;
+import jakarta.inject.Singleton;
+import jakarta.enterprise.inject.Produces;
+import jakarta.jms.JMSConnectionFactoryDefinition;
+import jakarta.jms.JMSDestinationDefinition;
+import jakarta.jms.Queue;
+import jakarta.jms.QueueConnectionFactory;
 
 @JMSConnectionFactoryDefinition(
     description = "Dataverse Ingest Queue Factory",
     name = "java:app/jms/factory/ingest",
     resourceAdapter = "jmsra",
-    interfaceName = "javax.jms.QueueConnectionFactory",
+    interfaceName = "jakarta.jms.QueueConnectionFactory",
     maxPoolSize = 250,
     minPoolSize = 1,
     properties = {
@@ -25,7 +25,7 @@
     description = "Dataverse Ingest Queue",
     name = "java:app/jms/queue/ingest",
     resourceAdapter = "jmsra",
-    interfaceName="javax.jms.Queue",
+    interfaceName="jakarta.jms.Queue",
     destinationName = "DataverseIngest"
 )
 @Singleton
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java
index 31208abf839..a1a8bde77f4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestReport.java
@@ -9,17 +9,17 @@
 import edu.harvard.iq.dataverse.DataFile;
 import java.io.Serializable;
 import java.util.Date;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.Lob;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
-import javax.persistence.Temporal;
-import javax.persistence.TemporalType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.Lob;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
+import jakarta.persistence.Temporal;
+import jakarta.persistence.TemporalType;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java
index 024e90325c3..a5d6a1af75c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestRequest.java
@@ -7,16 +7,16 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
index 9c6acd964c1..233f746fb17 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
@@ -48,11 +48,14 @@
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.dataaccess.S3AccessIO;
 import edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator;
+import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
+import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable;
 import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.ingest.metadataextraction.FileMetadataExtractor;
 import edu.harvard.iq.dataverse.ingest.metadataextraction.FileMetadataIngest;
 import edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.fits.FITSFileMetadataExtractor;
+import edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.netcdf.NetcdfFileMetadataExtractor;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
 import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta.DTAFileReader;
@@ -68,7 +71,11 @@
 import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReaderSpi;
 import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
+import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;
 import edu.harvard.iq.dataverse.util.*;
+import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException;
 
 import org.apache.commons.io.IOUtils;
 //import edu.harvard.iq.dvn.unf.*;
@@ -104,20 +111,22 @@
 import java.util.ListIterator;
 import java.util.logging.Logger;
 import java.util.Hashtable;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.jms.Queue;
-import javax.jms.QueueConnectionFactory;
-import javax.annotation.Resource;
-import javax.ejb.Asynchronous;
-import javax.jms.JMSException;
-import javax.jms.QueueConnection;
-import javax.jms.QueueSender;
-import javax.jms.QueueSession;
-import javax.jms.Message;
-import javax.faces.application.FacesMessage;
-import javax.ws.rs.core.MediaType;
+import java.util.Optional;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.jms.Queue;
+import jakarta.jms.QueueConnectionFactory;
+import jakarta.annotation.Resource;
+import jakarta.ejb.Asynchronous;
+import jakarta.jms.JMSException;
+import jakarta.jms.QueueConnection;
+import jakarta.jms.QueueSender;
+import jakarta.jms.QueueSession;
+import jakarta.jms.Message;
+import jakarta.faces.application.FacesMessage;
+import jakarta.ws.rs.core.MediaType;
+import java.text.MessageFormat;
 import ucar.nc2.NetcdfFile;
 import ucar.nc2.NetcdfFiles;
 
@@ -143,6 +152,8 @@ public class IngestServiceBean {
     @EJB
     AuxiliaryFileServiceBean auxiliaryFileService;
     @EJB
+    StorageUseServiceBean storageUseService; 
+    @EJB
     SystemConfig systemConfig;
 
     @Resource(lookup = "java:app/jms/queue/ingest")
@@ -155,7 +166,8 @@ public class IngestServiceBean {
     private static String dateTimeFormat_ymdhmsS = "yyyy-MM-dd HH:mm:ss.SSS";
     private static String dateFormat_ymd = "yyyy-MM-dd";
     
-    // This method tries to permanently store new files on the filesystem. 
+    // This method tries to permanently store new files in storage (on the filesystem,
+    // in an S3 bucket, etc.).
     // Then it adds the files that *have been successfully saved* to the 
     // dataset (by attaching the DataFiles to the Dataset, and the corresponding
     // FileMetadatas to the DatasetVersion). It also tries to ensure that none 
@@ -164,267 +176,386 @@ public class IngestServiceBean {
     // DataFileCategory objects, if any were already assigned to the files). 
     // It must be called before we attempt to permanently save the files in 
     // the database by calling the Save command on the dataset and/or version.
+    
+    // !! There is way too much going on in this method. :( !!
+    
+    // @todo: Is this method a good candidate for turning into a dedicated Command? 
     public List<DataFile> saveAndAddFilesToDataset(DatasetVersion version,
-                                                   List<DataFile> newFiles,
-                                                   DataFile fileToReplace,
-                                                   boolean tabIngest) {
-		List<DataFile> ret = new ArrayList<>();
-
-		if (newFiles != null && newFiles.size() > 0) {
-			// ret = new ArrayList<>();
-			// final check for duplicate file names;
-			// we tried to make the file names unique on upload, but then
-			// the user may have edited them on the "add files" page, and
-			// renamed FOOBAR-1.txt back to FOOBAR.txt...
+            List<DataFile> newFiles,
+            DataFile fileToReplace,
+            boolean tabIngest) {
+        UploadSessionQuotaLimit uploadSessionQuota = null; 
+        List<DataFile> ret = new ArrayList<>();
+
+        if (newFiles != null && newFiles.size() > 0) {
+            // ret = new ArrayList<>();
+            // final check for duplicate file names;
+            // we tried to make the file names unique on upload, but then
+            // the user may have edited them on the "add files" page, and
+            // renamed FOOBAR-1.txt back to FOOBAR.txt...
             IngestUtil.checkForDuplicateFileNamesFinal(version, newFiles, fileToReplace);
-			Dataset dataset = version.getDataset();
-
-			for (DataFile dataFile : newFiles) {
-				boolean unattached = false;
-				boolean savedSuccess = false;
-				if (dataFile.getOwner() == null) {
-					unattached = true;
-					dataFile.setOwner(dataset);
-				}
+            Dataset dataset = version.getDataset();
+            long totalBytesSaved = 0L;
 
-				String[] storageInfo = DataAccess.getDriverIdAndStorageLocation(dataFile.getStorageIdentifier());
-				String driverType = DataAccess.getDriverType(storageInfo[0]);
-				String storageLocation = storageInfo[1];
-				String tempFileLocation = null;
-				Path tempLocationPath = null;
-				if (driverType.equals("tmp")) {  //"tmp" is the default if no prefix or the "tmp://" driver
-					tempFileLocation = FileUtil.getFilesTempDirectory() + "/" + storageLocation;
-
-					// Try to save the file in its permanent location:
-					tempLocationPath = Paths.get(tempFileLocation);
-					WritableByteChannel writeChannel = null;
-					FileChannel readChannel = null;
-
-					StorageIO<DataFile> dataAccess = null;
-
-					try {
-						logger.fine("Attempting to create a new storageIO object for " + storageLocation);
-						dataAccess = DataAccess.createNewStorageIO(dataFile, storageLocation);
-
-						logger.fine("Successfully created a new storageIO object.");
-						/*
-						 * This commented-out code demonstrates how to copy bytes from a local
-						 * InputStream (or a readChannel) into the writable byte channel of a Dataverse
-						 * DataAccessIO object:
-						 */
-
-						/*
-						 * storageIO.open(DataAccessOption.WRITE_ACCESS);
-						 * 
-						 * writeChannel = storageIO.getWriteChannel(); readChannel = new
-						 * FileInputStream(tempLocationPath.toFile()).getChannel();
-						 * 
-						 * long bytesPerIteration = 16 * 1024; // 16K bytes long start = 0; while (
-						 * start < readChannel.size() ) { readChannel.transferTo(start,
-						 * bytesPerIteration, writeChannel); start += bytesPerIteration; }
-						 */
-
-						/*
-						 * But it's easier to use this convenience method from the DataAccessIO:
-						 * 
-						 * (if the underlying storage method for this file is local filesystem, the
-						 * DataAccessIO will simply copy the file using Files.copy, like this:
-						 * 
-						 * Files.copy(tempLocationPath, storageIO.getFileSystemLocation(),
-						 * StandardCopyOption.REPLACE_EXISTING);
-						 */
-						dataAccess.savePath(tempLocationPath);
-
-						// Set filesize in bytes
-						//
-						dataFile.setFilesize(dataAccess.getSize());
-						savedSuccess = true;
-						logger.fine("Success: permanently saved file " + dataFile.getFileMetadata().getLabel());
-
-                                            // TODO: reformat this file to remove the many tabs added in cc08330
-                                            extractMetadataNcml(dataFile, tempLocationPath);
-
-					} catch (IOException ioex) {
-                    logger.warning("Failed to save the file, storage id " + dataFile.getStorageIdentifier() + " (" + ioex.getMessage() + ")");
-					} finally {
-						if (readChannel != null) {
-							try {
-								readChannel.close();
-							} catch (IOException e) {
-							}
-						}
-						if (writeChannel != null) {
-							try {
-								writeChannel.close();
-							} catch (IOException e) {
-							}
-						}
-					}
+            if (systemConfig.isStorageQuotasEnforced()) {
+                // Check if this dataset is subject to any storage quotas:
+                uploadSessionQuota = fileService.getUploadSessionQuotaLimit(dataset);
+            }
+            
+            for (DataFile dataFile : newFiles) {
+                boolean unattached = false;
+                boolean savedSuccess = false;
+                if (dataFile.getOwner() == null) {
+                    // Is it ever "attached"? Do we ever call this method with
+                    // dataFile.getOwner() != null? We really shouldn't be.
+                    unattached = true;
+                    dataFile.setOwner(dataset);
+                }
+                
+                String[] storageInfo = DataAccess.getDriverIdAndStorageLocation(dataFile.getStorageIdentifier());
+                String driverType = DataAccess.getDriverType(storageInfo[0]);
+                String storageLocation = storageInfo[1];
+                String tempFileLocation = null;
+                Path tempLocationPath = null;
+                long confirmedFileSize = 0L; 
+                if (driverType.equals("tmp")) {  //"tmp" is the default if no prefix or the "tmp://" driver
+                    tempFileLocation = FileUtil.getFilesTempDirectory() + "/" + storageLocation;
+
+                    // Try to save the file in its permanent location:
+                    tempLocationPath = Paths.get(tempFileLocation);
+                    WritableByteChannel writeChannel = null;
+                    FileChannel readChannel = null;
+
+                    StorageIO<DataFile> dataAccess = null;
+
+                    try {
+                        logger.fine("Attempting to create a new storageIO object for " + storageLocation);
+                        dataAccess = DataAccess.createNewStorageIO(dataFile, storageLocation);
+
+                        logger.fine("Successfully created a new storageIO object.");
+                        /**
+                         * This commented-out code demonstrates how to copy
+                         * bytes from a local InputStream (or a readChannel)
+                         * into the writable byte channel of a Dataverse
+                         * DataAccessIO object:
+                         */
+
+                        /**
+                         * storageIO.open(DataAccessOption.WRITE_ACCESS);
+                         *
+                         * writeChannel = storageIO.getWriteChannel();
+                         * readChannel = new
+                         * FileInputStream(tempLocationPath.toFile()).getChannel();
+                         *
+                         * long bytesPerIteration = 16 * 1024; // 16K bytes long
+                         * start = 0; 
+                         * while ( start < readChannel.size() ) {
+                         *    readChannel.transferTo(start, bytesPerIteration, writeChannel); start += bytesPerIteration;
+                         * }
+                         */
+
+                        /**
+                         * But it's easier to use this convenience method from
+                         * the DataAccessIO:
+                         *
+                         * (if the underlying storage method for this file is
+                         * local filesystem, the DataAccessIO will simply copy
+                         * the file using Files.copy, like this:
+                         *
+                         * Files.copy(tempLocationPath,
+                         * storageIO.getFileSystemLocation(),
+                         * StandardCopyOption.REPLACE_EXISTING);
+                         */
+                        dataAccess.savePath(tempLocationPath);
+
+                        // Set filesize in bytes
+                        //
+                        confirmedFileSize = dataAccess.getSize();
+                        dataFile.setFilesize(confirmedFileSize);
+                        savedSuccess = true;
+                        logger.fine("Success: permanently saved file " + dataFile.getFileMetadata().getLabel());
+
+                        // TODO: reformat this file to remove the many tabs added in cc08330 - done, I think?
+                        extractMetadataNcml(dataFile, tempLocationPath);
+
+                    } catch (IOException ioex) {
+                        logger.warning("Failed to save the file, storage id " + dataFile.getStorageIdentifier() + " (" + ioex.getMessage() + ")");
+                    } finally {
+                        if (readChannel != null) {
+                            try {
+                                readChannel.close();
+                            } catch (IOException e) {
+                            }
+                        }
+                        if (writeChannel != null) {
+                            try {
+                                writeChannel.close();
+                            } catch (IOException e) {
+                            }
+                        }
+                    }
 
                     // Since we may have already spent some CPU cycles scaling down image thumbnails, 
-					// we may as well save them, by moving these generated images to the permanent
-					// dataset directory. We should also remember to delete any such files in the
-					// temp directory:
-					List<Path> generatedTempFiles = listGeneratedTempFiles(Paths.get(FileUtil.getFilesTempDirectory()),
-							storageLocation);
-					if (generatedTempFiles != null) {
-						for (Path generated : generatedTempFiles) {
-							if (savedSuccess) { // no need to try to save this aux file permanently, if we've failed to
-												// save the main file!
-								logger.fine("(Will also try to permanently save generated thumbnail file "
-										+ generated.toString() + ")");
-								try {
-									// Files.copy(generated, Paths.get(dataset.getFileSystemDirectory().toString(),
-									// generated.getFileName().toString()));
-									int i = generated.toString().lastIndexOf("thumb");
-									if (i > 1) {
-										String extensionTag = generated.toString().substring(i);
-										dataAccess.savePathAsAux(generated, extensionTag);
-										logger.fine(
-												"Saved generated thumbnail as aux object. \"preview available\" status: "
-														+ dataFile.isPreviewImageAvailable());
-									} else {
-										logger.warning(
-												"Generated thumbnail file name does not match the expected pattern: "
-														+ generated.toString());
-									}
-
-								} catch (IOException ioex) {
-									logger.warning("Failed to save generated file " + generated.toString());
-								}
-							}
-
-							// ... but we definitely want to delete it:
-							try {
-								Files.delete(generated);
-							} catch (IOException ioex) {
-								logger.warning("Failed to delete generated file " + generated.toString());
-							}
-						}
-					}
-					// Any necessary post-processing:
-					// performPostProcessingTasks(dataFile);
-				} else {
-					try {
-						StorageIO<DvObject> dataAccess = DataAccess.getStorageIO(dataFile);
-						//Populate metadata
-						dataAccess.open(DataAccessOption.READ_ACCESS);
-						//set file size
-						logger.fine("Setting file size: " + dataAccess.getSize());
-						dataFile.setFilesize(dataAccess.getSize());
-						if(dataAccess instanceof S3AccessIO) {
-							  ((S3AccessIO<DvObject>)dataAccess).removeTempTag();
-						}
-					} catch (IOException ioex) {
-						logger.warning("Failed to get file size, storage id " + dataFile.getStorageIdentifier() + " ("
-								+ ioex.getMessage() + ")");
-					}
-					savedSuccess = true;
-				}
+                    // we may as well save them, by moving these generated images to the permanent
+                    // dataset directory. We should also remember to delete any such files in the
+                    // temp directory:
+                    List<Path> generatedTempFiles = listGeneratedTempFiles(Paths.get(FileUtil.getFilesTempDirectory()),
+                            storageLocation);
+                    if (generatedTempFiles != null) {
+                        for (Path generated : generatedTempFiles) {
+                            if (savedSuccess) { // no need to try to save this aux file permanently, if we've failed to
+                                // save the main file!
+                                logger.fine("(Will also try to permanently save generated thumbnail file "
+                                        + generated.toString() + ")");
+                                try {
+                                    // Files.copy(generated, Paths.get(dataset.getFileSystemDirectory().toString(),
+                                    // generated.getFileName().toString()));
+                                    int i = generated.toString().lastIndexOf("thumb");
+                                    if (i > 1) {
+                                        String extensionTag = generated.toString().substring(i);
+                                        dataAccess.savePathAsAux(generated, extensionTag);
+                                        logger.fine(
+                                                "Saved generated thumbnail as aux object. \"preview available\" status: "
+                                                + dataFile.isPreviewImageAvailable());
+                                    } else {
+                                        logger.warning(
+                                                "Generated thumbnail file name does not match the expected pattern: "
+                                                + generated.toString());
+                                    }
 
-				logger.fine("Done! Finished saving new files in permanent storage and adding them to the dataset.");
-				boolean belowLimit = false;
-
-				try {
-					//getting StorageIO may require knowing the owner (so this must come before owner is potentially set back to null
-					belowLimit = dataFile.getStorageIO().isBelowIngestSizeLimit();
-				} catch (IOException e) {
-					logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage());
-				} 
-				if (unattached) {
-					dataFile.setOwner(null);
-				}
-				if (savedSuccess && belowLimit) {
-					// These are all brand new files, so they should all have
-					// one filemetadata total. -- L.A.
-					FileMetadata fileMetadata = dataFile.getFileMetadatas().get(0);
-					String fileName = fileMetadata.getLabel();
-
-					boolean metadataExtracted = false;
-					if (tabIngest && FileUtil.canIngestAsTabular(dataFile)) {
-						/*
-						 * Note that we don't try to ingest the file right away - instead we mark it as
-						 * "scheduled for ingest", then at the end of the save process it will be queued
-						 * for async. ingest in the background. In the meantime, the file will be
-						 * ingested as a regular, non-tabular file, and appear as such to the user,
-						 * until the ingest job is finished with the Ingest Service.
-						 */
-						dataFile.SetIngestScheduled();
-					} else if (fileMetadataExtractable(dataFile)) {
-
-						try {
-							// FITS is the only type supported for metadata
-							// extraction, as of now. -- L.A. 4.0
-                                                        // Note that extractMetadataNcml() is used for NetCDF/HDF5.
-							dataFile.setContentType("application/fits");
-							metadataExtracted = extractMetadata(tempFileLocation, dataFile, version);
-						} catch (IOException mex) {
-							logger.severe("Caught exception trying to extract indexable metadata from file "
-									+ fileName + ",  " + mex.getMessage());
-						}
-						if (metadataExtracted) {
-							logger.fine("Successfully extracted indexable metadata from file " + fileName);
-						} else {
-							logger.fine("Failed to extract indexable metadata from file " + fileName);
-						}
-					} else if (FileUtil.MIME_TYPE_INGESTED_FILE.equals(dataFile.getContentType())) {
+                                } catch (IOException ioex) {
+                                    logger.warning("Failed to save generated file " + generated.toString());
+                                }
+                            }
+
+                            // ... but we definitely want to delete it:
+                            try {
+                                Files.delete(generated);
+                            } catch (IOException ioex) {
+                                logger.warning("Failed to delete generated file " + generated.toString());
+                            }
+                        }
+                    }
+                    // Any necessary post-processing:
+                    // performPostProcessingTasks(dataFile);
+                } else {
+                    // This is a direct upload 
+                    try {
+                        StorageIO<DvObject> dataAccess = DataAccess.getStorageIO(dataFile);
+                        //Populate metadata
+                        dataAccess.open(DataAccessOption.READ_ACCESS);
+                        
+                        confirmedFileSize = dataAccess.getSize();
+                        
+                        // For directly-uploaded files, we perform the file size
+                        // limit and quota checks here. In some cases this means
+                        // performing them *again*: directly uploaded files have
+                        // already been checked once (so that the upload can be
+                        // rejected before the user clicks "save"). But for direct
+                        // uploads via the API, these checks haven't been performed
+                        // yet, so here's our chance.
+                        
+                        Long fileSizeLimit = systemConfig.getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId());
+                        
+                        if (fileSizeLimit == null || confirmedFileSize < fileSizeLimit) {
+                        
+                            //set file size
+                            logger.fine("Setting file size: " + confirmedFileSize);
+                            dataFile.setFilesize(confirmedFileSize);
+                                                
+                            if (dataAccess instanceof S3AccessIO) {
+                                ((S3AccessIO<DvObject>) dataAccess).removeTempTag();
+                            }
+                            savedSuccess = true;
+                        }
+                    } catch (IOException ioex) {
+                        logger.warning("Failed to get file size, storage id, or failed to remove the temp tag on the saved S3 object" + dataFile.getStorageIdentifier() + " ("
+                                + ioex.getMessage() + ")");
+                    }
+                }
+                
+                // If quotas are enforced, we will perform a quota check here. 
+                // If this is an upload via the UI, we must have already 
+                // performed this check once. But it is possible that somebody 
+                // else may have added more data to the same collection/dataset 
+                // etc., before this user was ready to click "save", so this is
+                // necessary. For other cases, such as the direct uploads via 
+                // the API, this is the single point in the workflow where  
+                // storage quotas are enforced. 
+
+                if (savedSuccess) {
+                    if (uploadSessionQuota != null) {
+                        // It may be worth considering refreshing the quota here, 
+                        // and incrementing the Storage Use record for 
+                        // all the parent objects in real time, as 
+                        // *each* individual file is being saved. I experimented
+                        // with that, but decided against it for performance 
+                        // reasons. But yes, there may be some edge case where 
+                        // parallel multi-file uploads can end up being able 
+                        // to save 2X worth the quota that was available at the 
+                        // beginning of each session. 
+                        if (confirmedFileSize > uploadSessionQuota.getRemainingQuotaInBytes()) {
+                            savedSuccess = false;
+                            logger.warning("file size over quota limit, skipping");
+                            // @todo: we need to figure out how to better communicate
+                            // this (potentially partial) failure to the user.  
+                            //throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(confirmedFileSize), bytesToHumanReadable(storageQuotaLimit)));
+                        } else {
+                            // Adjust quota: 
+                            logger.info("Setting total usage in bytes to " + (uploadSessionQuota.getTotalUsageInBytes() + confirmedFileSize));
+                            uploadSessionQuota.setTotalUsageInBytes(uploadSessionQuota.getTotalUsageInBytes() + confirmedFileSize);
+                        }
+                    }
+
+                    // ... unless we had to reject the file just now because of 
+                    // the quota limits, count the number of bytes saved for the 
+                    // purposes of incrementing the total storage of the parent
+                    // DvObjectContainers:
+                    
+                    if (savedSuccess) {
+                        totalBytesSaved += confirmedFileSize; 
+                    }
+                }
+
+                logger.fine("Done! Finished saving new file in permanent storage and adding it to the dataset.");
+                boolean belowLimit = false;
+
+                try {
+                    //getting StorageIO may require knowing the owner (so this must come before owner is potentially set back to null
+                    belowLimit = dataFile.getStorageIO().isBelowIngestSizeLimit();
+                } catch (IOException e) {
+                    logger.warning("Error getting ingest limit for file: " + dataFile.getIdentifier() + " : " + e.getMessage());
+                }
+
+                if (savedSuccess && belowLimit) {
+                    // These are all brand new files, so they should all have
+                    // one filemetadata total. -- L.A.
+                    FileMetadata fileMetadata = dataFile.getFileMetadatas().get(0);
+                    String fileName = fileMetadata.getLabel();
+
+                    boolean metadataExtracted = false;
+                    boolean metadataExtractedFromNetcdf = false;
+                    if (tabIngest && FileUtil.canIngestAsTabular(dataFile)) {
+                        /**
+                         * Note that we don't try to ingest the file right away
+                         * - instead we mark it as "scheduled for ingest", then
+                         * at the end of the save process it will be queued for
+                         * async. ingest in the background. In the meantime, the
+                         * file will be ingested as a regular, non-tabular file,
+                         * and appear as such to the user, until the ingest job
+                         * is finished with the Ingest Service.
+                         */
+                        dataFile.SetIngestScheduled();
+                    } else if (fileMetadataExtractable(dataFile)) {
+
+                        try {
+                            // FITS is the only type supported for metadata
+                            // extraction, as of now. -- L.A. 4.0
+                            // Note that extractMetadataNcml() is used for NetCDF/HDF5.
+                            dataFile.setContentType("application/fits");
+                            metadataExtracted = extractMetadata(tempFileLocation, dataFile, version);
+                        } catch (IOException mex) {
+                            logger.severe("Caught exception trying to extract indexable metadata from file "
+                                    + fileName + ",  " + mex.getMessage());
+                        }
+                        if (metadataExtracted) {
+                            logger.fine("Successfully extracted indexable metadata from file " + fileName);
+                        } else {
+                            logger.fine("Failed to extract indexable metadata from file " + fileName);
+                        }
+                    } else if (fileMetadataExtractableFromNetcdf(dataFile, tempLocationPath)) {
+                        try {
+                            logger.fine("trying to extract metadata from netcdf");
+                            metadataExtractedFromNetcdf = extractMetadataFromNetcdf(tempFileLocation, dataFile, version);
+                        } catch (IOException ex) {
+                            logger.fine("could not extract metadata from netcdf: " + ex);
+                        }
+                        if (metadataExtractedFromNetcdf) {
+                            logger.fine("Successfully extracted indexable metadata from netcdf file " + fileName);
+                        } else {
+                            logger.fine("Failed to extract indexable metadata from netcdf file " + fileName);
+                        }
+
+                    } else if (FileUtil.MIME_TYPE_INGESTED_FILE.equals(dataFile.getContentType())) {
                         // Make sure no *uningested* tab-delimited files are saved with the type "text/tab-separated-values"!
                         // "text/tsv" should be used instead: 
                         dataFile.setContentType(FileUtil.MIME_TYPE_TSV);
                     }
-				}
-				// ... and let's delete the main temp file if it exists:
-				if(tempLocationPath!=null) {
-    				try {
-	    				logger.fine("Will attempt to delete the temp file " + tempLocationPath.toString());
-			    		Files.delete(tempLocationPath);
-				    } catch (IOException ex) {
-					    // (non-fatal - it's just a temp file.)
-    					logger.warning("Failed to delete temp file " + tempLocationPath.toString());
-	    			}				
-				}
-				if (savedSuccess) {
-					// temp dbug line
-					// System.out.println("ADDING FILE: " + fileName + "; for dataset: " +
-					// dataset.getGlobalId());
-					// Make sure the file is attached to the dataset and to the version, if this
-					// hasn't been done yet:
-					if (dataFile.getOwner() == null) {
-						dataFile.setOwner(dataset);
-
-						version.getFileMetadatas().add(dataFile.getFileMetadata());
-						dataFile.getFileMetadata().setDatasetVersion(version);
-						dataset.getFiles().add(dataFile);
-
-						if (dataFile.getFileMetadata().getCategories() != null) {
-							ListIterator<DataFileCategory> dfcIt = dataFile.getFileMetadata().getCategories()
-									.listIterator();
-
-							while (dfcIt.hasNext()) {
-								DataFileCategory dataFileCategory = dfcIt.next();
-
-								if (dataFileCategory.getDataset() == null) {
-									DataFileCategory newCategory = dataset
-											.getCategoryByName(dataFileCategory.getName());
-									if (newCategory != null) {
-										newCategory.addFileMetadata(dataFile.getFileMetadata());
-										// dataFileCategory = newCategory;
-										dfcIt.set(newCategory);
-									} else {
-										dfcIt.remove();
-									}
-								}
-							}
-						}
-					}
-				}
+                }
+                if (unattached) {
+                    dataFile.setOwner(null);
+                }
+                // ... and let's delete the main temp file if it exists:
+                if (tempLocationPath != null) {
+                    try {
+                        logger.fine("Will attempt to delete the temp file " + tempLocationPath.toString());
+                        Files.delete(tempLocationPath);
+                    } catch (IOException ex) {
+                        // (non-fatal - it's just a temp file.)
+                        logger.warning("Failed to delete temp file " + tempLocationPath.toString());
+                    }
+                }
+                if (savedSuccess) {
+                    // temp debug line
+                    // System.out.println("ADDING FILE: " + fileName + "; for dataset: " +
+                    // dataset.getGlobalId());
+                    // Make sure the file is attached to the dataset and to the version, if this
+                    // hasn't been done yet:
+                    // @todo: but shouldn't we be doing the reverse if we haven't been 
+                    // able to save the file? - disconnect it from the dataset and 
+                    // the version?? - L.A. 2023 
+                    // (that said, is there *ever* a case where dataFile.getOwner() != null ?)
+                    if (dataFile.getOwner() == null) {
+                        dataFile.setOwner(dataset);
+
+                        version.getFileMetadatas().add(dataFile.getFileMetadata());
+                        dataFile.getFileMetadata().setDatasetVersion(version);
+                        dataset.getFiles().add(dataFile);
+
+                        if (dataFile.getFileMetadata().getCategories() != null) {
+                            ListIterator<DataFileCategory> dfcIt = dataFile.getFileMetadata().getCategories()
+                                    .listIterator();
+
+                            while (dfcIt.hasNext()) {
+                                DataFileCategory dataFileCategory = dfcIt.next();
+
+                                if (dataFileCategory.getDataset() == null) {
+                                    DataFileCategory newCategory = dataset.getCategoryByName(dataFileCategory.getName());
+                                    if (newCategory != null) {
+                                        newCategory.addFileMetadata(dataFile.getFileMetadata());
+                                        // dataFileCategory = newCategory;
+                                        dfcIt.set(newCategory);
+                                    } else {
+                                        dfcIt.remove();
+                                    }
+                                }
+                            }
+                        }
+                    }
+                    
+                    // Note: adding the file to the return list used to be 
+                    // done outside of this "if (savedSuccess)" block. I'm pretty
+                    // sure that was wrong. - L.A. 11-30-2023
+                    ret.add(dataFile);
+                    // (unless, that is, the return value isn't used for anything - ?)
+                }
 
-				ret.add(dataFile);
-			}
-		}
+            }
+            // Update storage use for all the parent dvobjects: 
+            logger.info("Incrementing recorded storage use by " + totalBytesSaved + " bytes for dataset " + dataset.getId());
+            // Q. Need to consider what happens when this code is called on Create?
+            // A. It works on create as well, yes. (The recursive increment
+            // query in the method below does need the parent dataset to 
+            // have the database id; but even if these files have been
+            // uploaded on the Create form, we first save the dataset, and 
+            // then add the files to it.) - L.A. 
+            storageUseService.incrementStorageSizeRecursively(dataset.getId(), totalBytesSaved);
+        }
 
-		return ret;
-	}
+        return ret;
+    }
     
     public List<Path> listGeneratedTempFiles(Path tempDirectory, String baseName) {
         List<Path> generatedFiles = new ArrayList<>();
@@ -467,15 +598,17 @@ public void startIngestJobsForDataset(Dataset dataset, AuthenticatedUser user) {
                 // todo: investigate why when calling save with the file object
                 // gotten from the loop, the roles assignment added at create is removed
                 // (switching to refinding via id resolves that)                
+                // possible explanation: when the flush mode is AUTO, a flush happens on query,
+                // which ensures that the role assignment added at create time is flushed
                 dataFile = fileService.find(dataFile.getId());
                 scheduledFiles.add(dataFile);
             }
         }
 
-        startIngestJobs(scheduledFiles, user);
+        startIngestJobs(dataset.getId(), scheduledFiles, user);
     }
     
-    public String startIngestJobs(List<DataFile> dataFiles, AuthenticatedUser user) {
+    public String startIngestJobs(Long datasetId, List<DataFile> dataFiles, AuthenticatedUser user) {
 
         IngestMessage ingestMessage = null;
         StringBuilder sb = new StringBuilder();
@@ -516,7 +649,7 @@ public String startIngestJobs(List<DataFile> dataFiles, AuthenticatedUser user)
         if (count > 0) {
             String info = "Ingest of " + count + " tabular data file(s) is in progress.";
             logger.info(info);
-            datasetService.addDatasetLock(scheduledFiles.get(0).getOwner().getId(),
+            datasetService.addDatasetLock(datasetId,
                     DatasetLock.Reason.Ingest,
                     (user != null) ? user.getId() : null,
                     info);
@@ -534,10 +667,12 @@ public int compare(DataFile d1, DataFile d2) {
                 }
             });
 
-            ingestMessage = new IngestMessage(IngestMessage.INGEST_MESAGE_LEVEL_INFO, user.getId());
+            ingestMessage = new IngestMessage(user.getId());
             for (int i = 0; i < count; i++) {
                 ingestMessage.addFileId(scheduledFilesArray[i].getId());
             }
+            ingestMessage.setDatasetId(datasetId);
+            ingestMessage.setInfo(info);
 
             QueueConnection conn = null;
             QueueSession session = null;
@@ -1008,7 +1143,14 @@ public boolean ingestAsTabular(Long datafile_id) {
                     }
                 }
 
-                if (!databaseSaveSuccessful) {
+                if (databaseSaveSuccessful) {
+                    // Add the size of the tab-delimited version of the data file 
+                    // that we have produced and stored to the recorded storage 
+                    // size of all the ancestor DvObjectContainers: 
+                    if (dataFile.getFilesize() > 0) {
+                        storageUseService.incrementStorageSizeRecursively(dataFile.getOwner().getId(), dataFile.getFilesize());
+                    }
+                } else {
                     logger.warning("Ingest failure (failed to save the tabular data in the database; file left intact as uploaded).");
                     return false;
                 }
@@ -1166,7 +1308,19 @@ public boolean fileMetadataExtractable(DataFile dataFile) {
         }
         return false;
     }
-    
+
+    // Inspired by fileMetadataExtractable, above
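+    // Returns true when the file's content type indicates a NetCDF or HDF5 
+    // file, i.e. one that the NetCDF-Java-based extractor used in 
+    // extractMetadataFromNetcdf() below can handle.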
+    public boolean fileMetadataExtractableFromNetcdf(DataFile dataFile, Path tempLocationPath) {
+        logger.fine("fileMetadataExtractableFromNetcdf dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath + ". contentType: " + dataFile.getContentType());
+        if (dataFile.getContentType() != null
+                && (dataFile.getContentType().equals(FileUtil.MIME_TYPE_NETCDF)
+                || dataFile.getContentType().equals(FileUtil.MIME_TYPE_XNETCDF)
+                || dataFile.getContentType().equals(FileUtil.MIME_TYPE_HDF5))) {
+            return true;
+        }
+        return false;
+    }
+
     /* 
      * extractMetadata: 
      * framework for extracting metadata from uploaded files. The results will 
@@ -1219,24 +1373,28 @@ public boolean extractMetadata(String tempFileLocation, DataFile dataFile, Datas
     }
 
     /**
-     * @param dataFile The DataFile from which to attempt NcML extraction
-     * (NetCDF or HDF5 format)
-     * @param tempLocationPath Null if the file is already saved to permanent
-     * storage. Otherwise, the path to the temp location of the files, as during
-     * initial upload.
-     * @return True if the Ncml files was created. False on any error or if the
-     * NcML file already exists.
+     * Try to extract bounding box (west, south, east, north).
+     *
+     * Inspired by extractMetadata(). Consider merging the methods. Note that
+     * unlike extractMetadata(), we are not calling processFileLevelMetadata().
+     *
+     * Also consider merging with extractMetadataNcml(); note, however, that while
+     * NcML should be extractable from all files that the NetCDF Java library can
+     * open, only some NetCDF files will have a bounding box.
+     *
+     * Note that we haven't yet created an API endpoint for calling this method
+     * on files that are already persisted to disk or S3, but the code should
+     * work, downloading files from S3 as necessary.
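+     *
+     * @param tempFileLocation Path to the temp location of the newly uploaded
+     * file, or null if the file is already in permanent storage.
+     * @param dataFile The DataFile from which to attempt extraction.
+     * @param editVersion The DatasetVersion to which any extracted metadata
+     * (such as the geospatial bounding box) should be added.
+     * @return True if metadata extraction completed; false if it was skipped
+     * or the file could not be located.
+     * @throws IOException if the extractor fails while reading the file.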
      */
-    public boolean extractMetadataNcml(DataFile dataFile, Path tempLocationPath) {
-        boolean ncmlFileCreated = false;
-        logger.fine("extractMetadataNcml: dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath);
-        InputStream inputStream = null;
+    public boolean extractMetadataFromNetcdf(String tempFileLocation, DataFile dataFile, DatasetVersion editVersion) throws IOException {
+        boolean ingestSuccessful = false;
+
         String dataFileLocation = null;
-        if (tempLocationPath != null) {
-            // This file was just uploaded and hasn't been saved to S3 or local storage.
-            dataFileLocation = tempLocationPath.toString();
+        if (tempFileLocation != null) {
+            logger.fine("tempFileLocation is non null. Setting dataFileLocation to " + tempFileLocation);
+            dataFileLocation = tempFileLocation;
         } else {
-            // This file is already on S3 or local storage.
+            logger.fine("tempFileLocation is null. Perhaps the file is alrady on disk or S3 direct upload is enabled.");
             File tempFile = null;
             File localFile;
             StorageIO<DataFile> storageIO;
@@ -1246,28 +1404,85 @@ public boolean extractMetadataNcml(DataFile dataFile, Path tempLocationPath) {
                 if (storageIO.isLocalFile()) {
                     localFile = storageIO.getFileSystemPath().toFile();
                     dataFileLocation = localFile.getAbsolutePath();
-                    logger.fine("extractMetadataNcml: file is local. Path: " + dataFileLocation);
+                    logger.fine("extractMetadataFromNetcdf: file is local. Path: " + dataFileLocation);
                 } else {
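+                    // Downloading the entire file from S3 just to look for a 
+                    // bounding box can be expensive, so only do it when the 
+                    // installation has explicitly opted in via the 
+                    // GEO_EXTRACT_S3_DIRECT_UPLOAD JVM setting: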
+                    Optional<Boolean> allow = JvmSettings.GEO_EXTRACT_S3_DIRECT_UPLOAD.lookupOptional(Boolean.class);
+                    if (!(allow.isPresent() && allow.get())) {
+                        logger.fine("extractMetadataFromNetcdf: skipping because of config is set to not slow down S3 remote upload.");
+                        return false;
+                    }
                     // Need to create a temporary local file:
-                    tempFile = File.createTempFile("tempFileExtractMetadataNcml", ".tmp");
+                    tempFile = File.createTempFile("tempFileExtractMetadataNetcdf", ".tmp");
                     try ( ReadableByteChannel targetFileChannel = (ReadableByteChannel) storageIO.getReadChannel();  FileChannel tempFileChannel = new FileOutputStream(tempFile).getChannel();) {
                         tempFileChannel.transferFrom(targetFileChannel, 0, storageIO.getSize());
                     }
                     dataFileLocation = tempFile.getAbsolutePath();
-                    logger.fine("extractMetadataNcml: file is on S3. Downloaded and saved to temp path: " + dataFileLocation);
+                    logger.fine("extractMetadataFromNetcdf: file is on S3. Downloaded and saved to temp path: " + dataFileLocation);
                 }
             } catch (IOException ex) {
-                logger.info("While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex);
+                logger.info("extractMetadataFromNetcdf, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex);
+                return false;
+            }
+        }
+
+        if (dataFileLocation == null) {
+            logger.fine("after all that dataFileLocation is still null! Returning early.");
+            return false;
+        }
+
+        // Use the NetCDF file metadata extractor plugin directly (this method 
+        // only handles NetCDF/HDF5 files, so no registry lookup is needed):
+        NetcdfFileMetadataExtractor extractorPlugin = new NetcdfFileMetadataExtractor();
+        logger.fine("creating file from " + dataFileLocation);
+        File file = new File(dataFileLocation);
+        FileMetadataIngest extractedMetadata = extractorPlugin.ingestFile(file);
+        Map<String, Set<String>> extractedMetadataMap = extractedMetadata.getMetadataMap();
+
+        if (extractedMetadataMap != null) {
+            logger.fine("Ingest Service: Processing extracted metadata from netcdf;");
+            if (extractedMetadata.getMetadataBlockName() != null) {
+                logger.fine("Ingest Service: This metadata from netcdf belongs to the " + extractedMetadata.getMetadataBlockName() + " metadata block.");
+                processDatasetMetadata(extractedMetadata, editVersion);
             }
         }
+
+        ingestSuccessful = true;
+
+        return ingestSuccessful;
+    }
+
+    /**
+     * @param dataFile The DataFile from which to attempt NcML extraction
+     * (NetCDF or HDF5 format)
+     * @param tempLocationPath Null if the file is already saved to permanent
+     * storage. Otherwise, the path to the temp location of the files, as during
+     * initial upload.
+     * @return True if the NcML file was created. False on any error or if the
+     * NcML file already exists.
+     */
+    public boolean extractMetadataNcml(DataFile dataFile, Path tempLocationPath) {
+        String contentType = dataFile.getContentType();
+        if (!("application/netcdf".equals(contentType) || "application/x-hdf5".equals(contentType))) {
+            logger.fine("Returning early from extractMetadataNcml because content type is " + contentType + " rather than application/netcdf or application/x-hdf5");
+            return false;
+        }
+        boolean ncmlFileCreated = false;
+        logger.fine("extractMetadataNcml: dataFileIn: " + dataFile + ". tempLocationPath: " + tempLocationPath);
+        InputStream inputStream = null;
+        String dataFileLocation = null;
+        if (tempLocationPath != null) {
+            logger.fine("extractMetadataNcml: tempLocationPath is non null. Setting dataFileLocation to " + tempLocationPath);
+            // This file was just uploaded and hasn't been saved to S3 or local storage.
+            dataFileLocation = tempLocationPath.toString();
+        } else {
+            logger.fine("extractMetadataNcml: tempLocationPath null. Calling getExistingFile for dataFileLocation.");
+            dataFileLocation = getExistingFile(dataFile, dataFileLocation);
+        }
         if (dataFileLocation != null) {
             try ( NetcdfFile netcdfFile = NetcdfFiles.open(dataFileLocation)) {
                 logger.fine("trying to open " + dataFileLocation);
                 if (netcdfFile != null) {
-                    // For now, empty string. What should we pass as a URL to toNcml()? The filename (including the path) most commonly at https://docs.unidata.ucar.edu/netcdf-java/current/userguide/ncml_cookbook.html
-                    // With an empty string the XML will show 'location="file:"'.
-                    String ncml = netcdfFile.toNcml("");
-                    inputStream = new ByteArrayInputStream(ncml.getBytes(StandardCharsets.UTF_8));
+                    ncmlFileCreated = isNcmlFileCreated(netcdfFile, tempLocationPath, dataFile, ncmlFileCreated);
                 } else {
                     logger.info("NetcdfFiles.open() could not open file id " + dataFile.getId() + " (null returned).");
                 }
@@ -1277,45 +1492,77 @@ public boolean extractMetadataNcml(DataFile dataFile, Path tempLocationPath) {
         } else {
             logger.info("dataFileLocation is null for file id " + dataFile.getId() + ". Can't extract NcML.");
         }
-        if (inputStream != null) {
-            // If you change NcML, you must also change the previewer.
-            String formatTag = "NcML";
-            // 0.1 is arbitrary. It's our first attempt to put out NcML so we're giving it a low number.
-            // If you bump the number here, be sure the bump the number in the previewer as well.
-            // We could use 2.2 here since that's the current version of NcML.
-            String formatVersion = "0.1";
-            String origin = "netcdf-java";
-            boolean isPublic = true;
-            // See also file.auxfiles.types.NcML in Bundle.properties. Used to group aux files in UI.
-            String type = "NcML";
-            // XML because NcML doesn't have its own MIME/content type at https://www.iana.org/assignments/media-types/media-types.xhtml
-            MediaType mediaType = new MediaType("text", "xml");
-            try {
-                // Let the cascade do the save if the file isn't yet on permanent storage.
-                boolean callSave = false;
-                if (tempLocationPath == null) {
-                    callSave = true;
-                    // Check for an existing NcML file
-                    logger.fine("Checking for existing NcML aux file for file id  " + dataFile.getId());
-                    AuxiliaryFile existingAuxiliaryFile = auxiliaryFileService.lookupAuxiliaryFile(dataFile, formatTag, formatVersion);
-                    if (existingAuxiliaryFile != null) {
-                        logger.fine("Aux file already exists for NetCDF/HDF5 file for file id  " + dataFile.getId());
-                        return false;
-                    }
+
+        return ncmlFileCreated;
+    }
+
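+    // Generates NcML from the already-opened NetcdfFile and saves it as an 
+    // "NcML" auxiliary file for the DataFile (letting the cascade handle the 
+    // database save when the file is not yet in permanent storage). Returns 
+    // true if the NcML auxiliary file was created and saved to storage.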
+    private boolean isNcmlFileCreated(final NetcdfFile netcdfFile, Path tempLocationPath, DataFile dataFile, boolean ncmlFileCreated) {
+        InputStream inputStream;
+        // For now, empty string. What should we pass as a URL to toNcml()? The filename (including the path) most commonly at https://docs.unidata.ucar.edu/netcdf-java/current/userguide/ncml_cookbook.html
+        // With an empty string the XML will show 'location="file:"'.
+        String ncml = netcdfFile.toNcml("");
+        inputStream = new ByteArrayInputStream(ncml.getBytes(StandardCharsets.UTF_8));
+        String formatTag = "NcML";
+        // 0.1 is arbitrary. It's our first attempt to put out NcML so we're giving it a low number.
+        // If you bump the number here, be sure to bump the number in the previewer as well.
+        // We could use 2.2 here since that's the current version of NcML.
+        String formatVersion = "0.1";
+        String origin = "netcdf-java";
+        boolean isPublic = true;
+        // See also file.auxfiles.types.NcML in Bundle.properties. Used to group aux files in UI.
+        String type = "NcML";
+        // XML because NcML doesn't have its own MIME/content type at https://www.iana.org/assignments/media-types/media-types.xhtml
+        MediaType mediaType = new MediaType("text", "xml");
+        try {
+            // Let the cascade do the save if the file isn't yet on permanent storage.
+            boolean callSave = false;
+            if (tempLocationPath == null) {
+                callSave = true;
+                // Check for an existing NcML file
+                logger.fine("Checking for existing NcML aux file for file id  " + dataFile.getId());
+                AuxiliaryFile existingAuxiliaryFile = auxiliaryFileService.lookupAuxiliaryFile(dataFile, formatTag, formatVersion);
+                if (existingAuxiliaryFile != null) {
+                    logger.fine("Aux file already exists for NetCDF/HDF5 file for file id  " + dataFile.getId());
+                    // Per the extractMetadataNcml() contract, return false if 
+                    // the NcML aux file already exists:
+                    return false;
                 }
-                AuxiliaryFile auxFile = auxiliaryFileService.processAuxiliaryFile(inputStream, dataFile, formatTag, formatVersion, origin, isPublic, type, mediaType, callSave);
-                logger.fine("Aux file extracted from NetCDF/HDF5 file saved to storage (but not to the database yet) from file id  " + dataFile.getId());
-                ncmlFileCreated = true;
-            } catch (Exception ex) {
-                logger.info("exception throw calling processAuxiliaryFile: " + ex);
             }
-        } else {
-            logger.info("extractMetadataNcml: input stream is null! dataFileLocation was " + dataFileLocation);
+            AuxiliaryFile auxFile = auxiliaryFileService.processAuxiliaryFile(inputStream, dataFile, formatTag, formatVersion, origin, isPublic, type, mediaType, callSave);
+            logger.fine("Aux file extracted from NetCDF/HDF5 file saved to storage (but not to the database yet) from file id  " + dataFile.getId());
+            ncmlFileCreated = true;
+        } catch (Exception ex) {
+            logger.info("exception throw calling processAuxiliaryFile: " + ex);
         }
-
         return ncmlFileCreated;
     }
 
+    private String getExistingFile(DataFile dataFile, String dataFileLocation) {
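+        // Returns a local filesystem path for the file, downloading it into a
+        // temporary file first if it is not stored on the local filesystem.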
+        // This file is already on S3 (non direct upload) or local storage.
+        File tempFile = null;
+        File localFile;
+        StorageIO<DataFile> storageIO;
+        try {
+            storageIO = dataFile.getStorageIO();
+            storageIO.open();
+            if (storageIO.isLocalFile()) {
+                localFile = storageIO.getFileSystemPath().toFile();
+                dataFileLocation = localFile.getAbsolutePath();
+                logger.fine("getExistingFile: file is local. Path: " + dataFileLocation);
+            } else {
+                // Need to create a temporary local file:
+                tempFile = File.createTempFile("tempFileExtractMetadataNcml", ".tmp");
+                try ( ReadableByteChannel targetFileChannel = (ReadableByteChannel) storageIO.getReadChannel();  FileChannel tempFileChannel = new FileOutputStream(tempFile).getChannel();) {
+                    tempFileChannel.transferFrom(targetFileChannel, 0, storageIO.getSize());
+                }
+                dataFileLocation = tempFile.getAbsolutePath();
+                logger.fine("getExistingFile: file is on S3. Downloaded and saved to temp path: " + dataFileLocation);
+            }
+        } catch (IOException ex) {
+            logger.fine("getExistingFile: While attempting to extract NcML, could not use storageIO for data file id " + dataFile.getId() + ". Exception: " + ex);
+        }
+        return dataFileLocation;
+    }
+
     private void processDatasetMetadata(FileMetadataIngest fileMetadataIngest, DatasetVersion editVersion) throws IOException {
         
         
@@ -1328,189 +1575,189 @@ private void processDatasetMetadata(FileMetadataIngest fileMetadataIngest, Datas
                 Map<String, Set<String>> fileMetadataMap = fileMetadataIngest.getMetadataMap();
                 for (DatasetFieldType dsft : mdb.getDatasetFieldTypes()) {
                     if (dsft.isPrimitive()) {
-                        if (!dsft.isHasParent()) {
-                            String dsfName = dsft.getName();
-                            // See if the plugin has found anything for this field: 
-                            if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) {
-
-                                logger.fine("Ingest Service: found extracted metadata for field " + dsfName);
-                                // go through the existing fields:
-                                for (DatasetField dsf : editVersion.getFlatDatasetFields()) {
-                                    if (dsf.getDatasetFieldType().equals(dsft)) {
-                                        // yep, this is our field!
-                                        // let's go through the values that the ingest 
-                                        // plugin found in the file for this field: 
-
-                                        Set<String> mValues = fileMetadataMap.get(dsfName);
-
-                                        // Special rules apply to aggregation of values for 
-                                        // some specific fields - namely, the resolution.* 
-                                        // fields from the Astronomy Metadata block. 
-                                        // TODO: rather than hard-coded, this needs to be
-                                        // programmatically defined. -- L.A. 4.0
-                                        if (dsfName.equals("resolution.Temporal")
-                                                || dsfName.equals("resolution.Spatial")
-                                                || dsfName.equals("resolution.Spectral")) {
-                                            // For these values, we aggregate the minimum-maximum 
-                                            // pair, for the entire set. 
-                                            // So first, we need to go through the values found by 
-                                            // the plugin and select the min. and max. values of 
-                                            // these: 
-                                            // (note that we are assuming that they all must
-                                            // validate as doubles!)
-
-                                            Double minValue = null;
-                                            Double maxValue = null;
-
-                                            for (String fValue : mValues) {
-
-                                                try {
-                                                    double thisValue = Double.parseDouble(fValue);
-
-                                                    if (minValue == null || Double.compare(thisValue, minValue) < 0) {
-                                                        minValue = thisValue;
-                                                    }
-                                                    if (maxValue == null || Double.compare(thisValue, maxValue) > 0) {
-                                                        maxValue = thisValue;
-                                                    }
-                                                } catch (NumberFormatException e) {
-                                                }
-                                            }
-
-                                            // Now let's see what aggregated values we 
-                                            // have stored already: 
-                                            
-                                            // (all of these resolution.* fields have allowedMultiple set to FALSE, 
-                                            // so there can be only one!)
-                                            //logger.fine("Min value: "+minValue+", Max value: "+maxValue);
-                                            if (minValue != null && maxValue != null) {
-                                                Double storedMinValue = null; 
-                                                Double storedMaxValue = null;
-                                            
-                                                String storedValue = "";
-                                                
-                                                if (dsf.getDatasetFieldValues() != null && dsf.getDatasetFieldValues().get(0) != null) {
-                                                    storedValue = dsf.getDatasetFieldValues().get(0).getValue();
-                                                
-                                                    if (storedValue != null && !storedValue.equals("")) {
-                                                        try {
-
-                                                            if (storedValue.indexOf(" - ") > -1) {
-                                                                storedMinValue = Double.parseDouble(storedValue.substring(0, storedValue.indexOf(" - ")));
-                                                                storedMaxValue = Double.parseDouble(storedValue.substring(storedValue.indexOf(" - ") + 3));
-                                                            } else {
-                                                                storedMinValue = Double.parseDouble(storedValue);
-                                                                storedMaxValue = storedMinValue;
-                                                            }
-                                                            if (storedMinValue != null && storedMinValue.compareTo(minValue) < 0) {
-                                                                minValue = storedMinValue;
-                                                            }
-                                                            if (storedMaxValue != null && storedMaxValue.compareTo(maxValue) > 0) {
-                                                                maxValue = storedMaxValue;
-                                                            }
-                                                        } catch (NumberFormatException e) {}
-                                                    } else {
-                                                        storedValue = "";
-                                                    }
-                                                }
+        if (!dsft.isHasParent()) {
+            String dsfName = dsft.getName();
+            // See if the plugin has found anything for this field:
+            if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) {
+                
+                logger.fine("Ingest Service: found extracted metadata for field " + dsfName);
+                // go through the existing fields:
+                for (DatasetField dsf : editVersion.getFlatDatasetFields()) {
+                    if (dsf.getDatasetFieldType().equals(dsft)) {
+                        // yep, this is our field!
+                        // let's go through the values that the ingest
+                        // plugin found in the file for this field:
+                        
+                        Set<String> mValues = fileMetadataMap.get(dsfName);
+                        
+                        // Special rules apply to aggregation of values for
+                        // some specific fields - namely, the resolution.*
+                        // fields from the Astronomy Metadata block.
+                        // TODO: rather than hard-coded, this needs to be
+                        // programmatically defined. -- L.A. 4.0
+                        if (dsfName.equals("resolution.Temporal")
+                                || dsfName.equals("resolution.Spatial")
+                                || dsfName.equals("resolution.Spectral")) {
+                            // For these values, we aggregate the minimum-maximum
+                            // pair, for the entire set.
+                            // So first, we need to go through the values found by
+                            // the plugin and select the min. and max. values of
+                            // these:
+                            // (note that we are assuming that they all must
+                            // validate as doubles!)
+                            
+                            Double minValue = null;
+                            Double maxValue = null;
+                            
+                            for (String fValue : mValues) {
+                                
+                                try {
+                                    double thisValue = Double.parseDouble(fValue);
+                                    
+                                    if (minValue == null || Double.compare(thisValue, minValue) < 0) {
+                                        minValue = thisValue;
+                                    }
+                                    if (maxValue == null || Double.compare(thisValue, maxValue) > 0) {
+                                        maxValue = thisValue;
+                                    }
+                                } catch (NumberFormatException e) {
+                                }
+                            }
+                            
+                            // Now let's see what aggregated values we
+                            // have stored already:
+                            
+                            // (all of these resolution.* fields have allowedMultiple set to FALSE,
+                            // so there can be only one!)
+                            //logger.fine("Min value: "+minValue+", Max value: "+maxValue);
+                            if (minValue != null && maxValue != null) {
+                                Double storedMinValue = null;
+                                Double storedMaxValue = null;
+                                
+                                String storedValue = "";
+                                
+                                if (dsf.getDatasetFieldValues() != null && dsf.getDatasetFieldValues().get(0) != null) {
+                                    storedValue = dsf.getDatasetFieldValues().get(0).getValue();
+                                    
+                                    if (storedValue != null && !storedValue.equals("")) {
+                                        try {
                                             
-                                                //logger.fine("Stored min value: "+storedMinValue+", Stored max value: "+storedMaxValue);
-                                                
-                                                String newAggregateValue = "";
-                                                
-                                                if (minValue.equals(maxValue)) {
-                                                    newAggregateValue = minValue.toString();
-                                                } else {
-                                                    newAggregateValue = minValue.toString() + " - " + maxValue.toString();
-                                                }
-                                                
-                                                // finally, compare it to the value we have now:
-                                                if (!storedValue.equals(newAggregateValue)) {
-                                                    if (dsf.getDatasetFieldValues() == null) {
-                                                        dsf.setDatasetFieldValues(new ArrayList<DatasetFieldValue>());
-                                                    }
-                                                    if (dsf.getDatasetFieldValues().get(0) == null) {
-                                                        DatasetFieldValue newDsfv = new DatasetFieldValue(dsf);
-                                                        dsf.getDatasetFieldValues().add(newDsfv);
-                                                    }
-                                                    dsf.getDatasetFieldValues().get(0).setValue(newAggregateValue);
-                                                }
+                                            if (storedValue.indexOf(" - ") > -1) {
+                                                storedMinValue = Double.parseDouble(storedValue.substring(0, storedValue.indexOf(" - ")));
+                                                storedMaxValue = Double.parseDouble(storedValue.substring(storedValue.indexOf(" - ") + 3));
+                                            } else {
+                                                storedMinValue = Double.parseDouble(storedValue);
+                                                storedMaxValue = storedMinValue;
+                                            }
+                                            if (storedMinValue != null && storedMinValue.compareTo(minValue) < 0) {
+                                                minValue = storedMinValue;
                                             }
-                                            // Ouch. 
-                                        } else {
-                                            // Other fields are aggregated simply by 
-                                            // collecting a list of *unique* values encountered 
-                                            // for this Field throughout the dataset. 
-                                            // This means we need to only add the values *not yet present*.
-                                            // (the implementation below may be inefficient - ?)
-
-                                            for (String fValue : mValues) {
-                                                if (!dsft.isControlledVocabulary()) {
-                                                    Iterator<DatasetFieldValue> dsfvIt = dsf.getDatasetFieldValues().iterator();
-
-                                                    boolean valueExists = false;
-
-                                                    while (dsfvIt.hasNext()) {
-                                                        DatasetFieldValue dsfv = dsfvIt.next();
-                                                        if (fValue.equals(dsfv.getValue())) {
-                                                            logger.fine("Value " + fValue + " already exists for field " + dsfName);
-                                                            valueExists = true;
-                                                            break;
-                                                        }
-                                                    }
-
-                                                    if (!valueExists) {
-                                                        logger.fine("Creating a new value for field " + dsfName + ": " + fValue);
-                                                        DatasetFieldValue newDsfv = new DatasetFieldValue(dsf);
-                                                        newDsfv.setValue(fValue);
-                                                        dsf.getDatasetFieldValues().add(newDsfv);
-                                                    }
-
-                                                } else {
-                                                    // A controlled vocabulary entry: 
-                                                    // first, let's see if it's a legit control vocab. entry: 
-                                                    ControlledVocabularyValue legitControlledVocabularyValue = null;
-                                                    Collection<ControlledVocabularyValue> definedVocabularyValues = dsft.getControlledVocabularyValues();
-                                                    if (definedVocabularyValues != null) {
-                                                        for (ControlledVocabularyValue definedVocabValue : definedVocabularyValues) {
-                                                            if (fValue.equals(definedVocabValue.getStrValue())) {
-                                                                logger.fine("Yes, " + fValue + " is a valid controlled vocabulary value for the field " + dsfName);
-                                                                legitControlledVocabularyValue = definedVocabValue;
-                                                                break;
-                                                            }
-                                                        }
-                                                    }
-                                                    if (legitControlledVocabularyValue != null) {
-                                                        // Only need to add the value if it is new, 
-                                                        // i.e. if it does not exist yet: 
-                                                        boolean valueExists = false;
-
-                                                        List<ControlledVocabularyValue> existingControlledVocabValues = dsf.getControlledVocabularyValues();
-                                                        if (existingControlledVocabValues != null) {
-                                                            Iterator<ControlledVocabularyValue> cvvIt = existingControlledVocabValues.iterator();
-                                                            while (cvvIt.hasNext()) {
-                                                                ControlledVocabularyValue cvv = cvvIt.next();
-                                                                if (fValue.equals(cvv.getStrValue())) {
-                                                                    // or should I use if (legitControlledVocabularyValue.equals(cvv)) ?
-                                                                    logger.fine("Controlled vocab. value " + fValue + " already exists for field " + dsfName);
-                                                                    valueExists = true;
-                                                                    break;
-                                                                }
-                                                            }
-                                                        }
-
-                                                        if (!valueExists) {
-                                                            logger.fine("Adding controlled vocabulary value " + fValue + " to field " + dsfName);
-                                                            dsf.getControlledVocabularyValues().add(legitControlledVocabularyValue);
-                                                        }
-                                                    }
+                                            if (storedMaxValue != null && storedMaxValue.compareTo(maxValue) > 0) {
+                                                maxValue = storedMaxValue;
+                                            }
+                                        } catch (NumberFormatException e) {}
+                                    } else {
+                                        storedValue = "";
+                                    }
+                                }
+                                
+                                //logger.fine("Stored min value: "+storedMinValue+", Stored max value: "+storedMaxValue);
+                                
+                                String newAggregateValue = "";
+                                
+                                if (minValue.equals(maxValue)) {
+                                    newAggregateValue = minValue.toString();
+                                } else {
+                                    newAggregateValue = minValue.toString() + " - " + maxValue.toString();
+                                }
+                                
+                                // finally, compare it to the value we have now:
+                                if (!storedValue.equals(newAggregateValue)) {
+                                    if (dsf.getDatasetFieldValues() == null) {
+                                        dsf.setDatasetFieldValues(new ArrayList<DatasetFieldValue>());
+                                    }
+                                    if (dsf.getDatasetFieldValues().get(0) == null) {
+                                        DatasetFieldValue newDsfv = new DatasetFieldValue(dsf);
+                                        dsf.getDatasetFieldValues().add(newDsfv);
+                                    }
+                                    dsf.getDatasetFieldValues().get(0).setValue(newAggregateValue);
+                                }
+                            }
+                            // Ouch.
+                        } else {
+                            // Other fields are aggregated simply by
+                            // collecting a list of *unique* values encountered
+                            // for this Field throughout the dataset.
+                            // This means we need to only add the values *not yet present*.
+                            // (the implementation below may be inefficient - ?)
+                            
+                            for (String fValue : mValues) {
+                                if (!dsft.isControlledVocabulary()) {
+                                    Iterator<DatasetFieldValue> dsfvIt = dsf.getDatasetFieldValues().iterator();
+                                    
+                                    boolean valueExists = false;
+                                    
+                                    while (dsfvIt.hasNext()) {
+                                        DatasetFieldValue dsfv = dsfvIt.next();
+                                        if (fValue.equals(dsfv.getValue())) {
+                                            logger.fine("Value " + fValue + " already exists for field " + dsfName);
+                                            valueExists = true;
+                                            break;
+                                        }
+                                    }
+                                    
+                                    if (!valueExists) {
+                                        logger.fine("Creating a new value for field " + dsfName + ": " + fValue);
+                                        DatasetFieldValue newDsfv = new DatasetFieldValue(dsf);
+                                        newDsfv.setValue(fValue);
+                                        dsf.getDatasetFieldValues().add(newDsfv);
+                                    }
+                                    
+                                } else {
+                                    // A controlled vocabulary entry:
+                                    // first, let's see if it's a legit control vocab. entry:
+                                    ControlledVocabularyValue legitControlledVocabularyValue = null;
+                                    Collection<ControlledVocabularyValue> definedVocabularyValues = dsft.getControlledVocabularyValues();
+                                    if (definedVocabularyValues != null) {
+                                        for (ControlledVocabularyValue definedVocabValue : definedVocabularyValues) {
+                                            if (fValue.equals(definedVocabValue.getStrValue())) {
+                                                logger.fine("Yes, " + fValue + " is a valid controlled vocabulary value for the field " + dsfName);
+                                                legitControlledVocabularyValue = definedVocabValue;
+                                                break;
+                                            }
+                                        }
+                                    }
+                                    if (legitControlledVocabularyValue != null) {
+                                        // Only need to add the value if it is new,
+                                        // i.e. if it does not exist yet:
+                                        boolean valueExists = false;
+                                        
+                                        List<ControlledVocabularyValue> existingControlledVocabValues = dsf.getControlledVocabularyValues();
+                                        if (existingControlledVocabValues != null) {
+                                            Iterator<ControlledVocabularyValue> cvvIt = existingControlledVocabValues.iterator();
+                                            while (cvvIt.hasNext()) {
+                                                ControlledVocabularyValue cvv = cvvIt.next();
+                                                if (fValue.equals(cvv.getStrValue())) {
+                                                    // or should I use if (legitControlledVocabularyValue.equals(cvv)) ?
+                                                    logger.fine("Controlled vocab. value " + fValue + " already exists for field " + dsfName);
+                                                    valueExists = true;
+                                                    break;
                                                 }
                                             }
                                         }
+                                        
+                                        if (!valueExists) {
+                                            logger.fine("Adding controlled vocabulary value " + fValue + " to field " + dsfName);
+                                            dsf.getControlledVocabularyValues().add(legitControlledVocabularyValue);
+                                        }
                                     }
                                 }
                             }
                         }
+                    }
+                }
+            }
+        }
                     } else {
                         // A compound field: 
                         // See if the plugin has found anything for the fields that 
@@ -1519,72 +1766,71 @@ private void processDatasetMetadata(FileMetadataIngest fileMetadataIngest, Datas
                         // create a new compound field value and its child 
                         // 
                         DatasetFieldCompoundValue compoundDsfv = new DatasetFieldCompoundValue();
-                        int nonEmptyFields = 0; 
+                        int nonEmptyFields = 0;
                         for (DatasetFieldType cdsft : dsft.getChildDatasetFieldTypes()) {
                             String dsfName = cdsft.getName();
-                            if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) {  
-                                logger.fine("Ingest Service: found extracted metadata for field " + dsfName + ", part of the compound field "+dsft.getName());
-                                
+                            if (fileMetadataMap.get(dsfName) != null && !fileMetadataMap.get(dsfName).isEmpty()) {
+                                logger.fine("Ingest Service: found extracted metadata for field " + dsfName + ", part of the compound field " + dsft.getName());
+
                                 if (cdsft.isPrimitive()) {
                                     // probably an unnecessary check - child fields
-                                    // of compound fields are always primitive... 
-                                    // but maybe it'll change in the future. 
+                                    // of compound fields are always primitive...
+                                    // but maybe it'll change in the future.
                                     if (!cdsft.isControlledVocabulary()) {
                                         // TODO: can we have controlled vocabulary
                                         // sub-fields inside compound fields?
-                                        
+
                                         DatasetField childDsf = new DatasetField();
                                         childDsf.setDatasetFieldType(cdsft);
-                                        
+
                                         DatasetFieldValue newDsfv = new DatasetFieldValue(childDsf);
-                                        newDsfv.setValue((String)fileMetadataMap.get(dsfName).toArray()[0]);
+                                        newDsfv.setValue((String) fileMetadataMap.get(dsfName).toArray()[0]);
                                         childDsf.getDatasetFieldValues().add(newDsfv);
-                                        
+
                                         childDsf.setParentDatasetFieldCompoundValue(compoundDsfv);
                                         compoundDsfv.getChildDatasetFields().add(childDsf);
-                                        
+
                                         nonEmptyFields++;
                                     }
-                                } 
+                                }
                             }
                         }
-                        
+
                         if (nonEmptyFields > 0) {
-                            // let's go through this dataset's fields and find the 
-                            // actual parent for this sub-field: 
+                            // let's go through this dataset's fields and find the
+                            // actual parent for this sub-field:
                             for (DatasetField dsf : editVersion.getFlatDatasetFields()) {
                                 if (dsf.getDatasetFieldType().equals(dsft)) {
-                                    
+
                                     // Now let's check that the dataset version doesn't already have
-                                    // this compound value - we are only interested in aggregating 
-                                    // unique values. Note that we need to compare compound values 
-                                    // as sets! -- i.e. all the sub fields in 2 compound fields 
-                                    // must match in order for these 2 compounds to be recognized 
+                                    // this compound value - we are only interested in aggregating
+                                    // unique values. Note that we need to compare compound values
+                                    // as sets! -- i.e. all the sub fields in 2 compound fields
+                                    // must match in order for these 2 compounds to be recognized
                                     // as "the same":
-                                    
-                                    boolean alreadyExists = false; 
+                                    boolean alreadyExists = false;
                                     for (DatasetFieldCompoundValue dsfcv : dsf.getDatasetFieldCompoundValues()) {
-                                        int matches = 0; 
+                                        int matches = 0;
 
                                         for (DatasetField cdsf : dsfcv.getChildDatasetFields()) {
                                             String cdsfName = cdsf.getDatasetFieldType().getName();
                                             String cdsfValue = cdsf.getDatasetFieldValues().get(0).getValue();
                                             if (cdsfValue != null && !cdsfValue.equals("")) {
-                                                String extractedValue = (String)fileMetadataMap.get(cdsfName).toArray()[0];
-                                                logger.fine("values: existing: "+cdsfValue+", extracted: "+extractedValue);
+                                                String extractedValue = (String) fileMetadataMap.get(cdsfName).toArray()[0];
+                                                logger.fine("values: existing: " + cdsfValue + ", extracted: " + extractedValue);
                                                 if (cdsfValue.equals(extractedValue)) {
                                                     matches++;
                                                 }
                                             }
                                         }
                                         if (matches == nonEmptyFields) {
-                                            alreadyExists = true; 
+                                            alreadyExists = true;
                                             break;
                                         }
                                     }
-                                                                        
+
                                     if (!alreadyExists) {
-                                        // save this compound value, by attaching it to the 
+                                        // save this compound value, by attaching it to the
                                         // version for proper cascading:
                                         compoundDsfv.setParentDatasetField(dsf);
                                         dsf.getDatasetFieldCompoundValues().add(compoundDsfv);
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java
index 9ea2cd0343f..8c5dad237b1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceShapefileHelper.java
@@ -16,7 +16,7 @@
 import java.io.IOException;
 import java.util.List;
 import java.util.logging.Logger;
-//import javax.ejb.EJB;
+//import jakarta.ejb.EJB;
 
 /**
  *  Used by the IngestServiceBean to redistribute a zipped Shapefile*
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index 9484a412913..3d30f7e6ec3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -20,15 +20,12 @@
 package edu.harvard.iq.dataverse.ingest;
 
 import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
@@ -36,9 +33,9 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import org.dataverse.unf.UNFUtil;
 import org.dataverse.unf.UnfException;
 
@@ -350,7 +347,7 @@ public static boolean shouldHaveUnf(DatasetVersion version) {
             return false;
         }
         List<String> values = getUnfValuesOfFiles(version);
-        logger.fine("UNF values for files from Dataset version " + version.getSemanticVersion() + " from " + version.getDataset().getGlobalIdString() + ": " + values);
+        logger.fine("UNF values for files from Dataset version " + version.getSemanticVersion() + " from " + version.getDataset().getGlobalId().asString() + ": " + values);
         if (values.size() > 0) {
             return true;
         } else {
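
The IngestUtil hunks above are dominated by the javax.json to jakarta.json namespace migration; only the import package changes, not the builder API. A minimal sketch of the jakarta.json builders, with made-up values (these are not the actual Dataverse helpers):

import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;

// Illustrative sketch only: the same builder calls as before the migration,
// just under the jakarta.json package. Values below are made up.
public class JakartaJsonSketch {
    public static void main(String[] args) {
        JsonArrayBuilder fileUnfs = Json.createArrayBuilder()
                .add("UNF:6:aaaaaaaaaaaaaaaaaaaaaa==")
                .add("UNF:6:bbbbbbbbbbbbbbbbbbbbbb==");

        JsonObjectBuilder versionJson = Json.createObjectBuilder()
                .add("semanticVersion", "1.0")
                .add("fileUnfs", fileUnfs);

        System.out.println(versionJson.build());
    }
}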
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
index 5f771d2756b..9b62b62fe61 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestableDataChecker.java
@@ -610,20 +610,19 @@ public String detectTabularDataFormat(File fh) {
         String readableFormatType = null;
         FileChannel srcChannel = null;
         FileInputStream inp = null;
-        try {
-            int buffer_size = this.getBufferSize(fh);
-            dbgLog.fine("buffer_size: " + buffer_size);
         
+        try {
             // set-up a FileChannel instance for a given file object
             inp = new FileInputStream(fh);
             srcChannel = inp.getChannel();
+            long buffer_size = this.getBufferSize(srcChannel);
+            dbgLog.fine("buffer_size: " + buffer_size);
 
             // create a read-only MappedByteBuffer
             MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, buffer_size);
-
+            
             //this.printHexDump(buff, "hex dump of the byte-buffer");
 
-            //for (String fmt : defaultFormatSet){
             buff.rewind();
             dbgLog.fine("before the for loop");
             for (String fmt : this.getTestFormatSet()) {
@@ -646,7 +645,6 @@ public String detectTabularDataFormat(File fh) {
                             readableFormatType = result;
                         }
                         dbgLog.fine("readableFormatType=" + readableFormatType);
-                        return readableFormatType;
                     } else {
                         dbgLog.fine("null was returned for " + fmt + " test");
                         if (DEBUG) {
@@ -669,9 +667,15 @@ public String detectTabularDataFormat(File fh) {
                     dbgLog.info("BufferUnderflowException " + e);
                     e.printStackTrace();
                 }
+                
+                if (readableFormatType != null) {
+                    break;
+                }
             }
-
-            return readableFormatType;
+            
+            // help garbage-collect the mapped buffer sooner, to avoid the JVM
+            // holding onto the underlying file unnecessarily:
+            buff = null;
 
         } catch (FileNotFoundException fe) {
             dbgLog.fine("exception detected: file was not foud");
@@ -716,15 +720,19 @@ private String checkUncompressedFirst5bytes(String fisrt5bytes) {
      * adjust the size of the buffer according to the size of 
      * the file if necessary; otherwise, use the default size
      */
-    private int getBufferSize(File fh) {
+    private long getBufferSize(FileChannel fileChannel) {
         boolean DEBUG = false;
         int BUFFER_SIZE = DEFAULT_BUFFER_SIZE;
-        if (fh.length() < DEFAULT_BUFFER_SIZE) {
-            BUFFER_SIZE = (int) fh.length();
+        try {
+        if (fileChannel.size() < DEFAULT_BUFFER_SIZE) {
+            BUFFER_SIZE = (int) fileChannel.size();
             if (DEBUG) {
                 out.println("non-default buffer_size: new size=" + BUFFER_SIZE);
             }
         }
+        } catch (IOException ioex) {
+            dbgLog.warning("failed to check the physical file size under an open FileChannel");
+        }
         return BUFFER_SIZE;
     }
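
The detectTabularDataFormat() changes above size the read-only mapping from the already-open FileChannel (getBufferSize(FileChannel)) instead of re-checking a separate File handle, and break out of the format loop rather than returning early. A minimal, self-contained sketch of that map-from-the-channel setup, using try-with-resources and a hypothetical DEFAULT_BUFFER_SIZE constant:

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;

// Illustrative sketch only: map at most DEFAULT_BUFFER_SIZE bytes of a file
// read-only, taking the size from the open channel as in the change above.
// DEFAULT_BUFFER_SIZE here is a hypothetical stand-in for the class constant.
public class MapLeadingBytesSketch {

    private static final long DEFAULT_BUFFER_SIZE = 500_000L;

    static MappedByteBuffer mapLeadingBytes(String path) throws IOException {
        try (FileInputStream in = new FileInputStream(path);
             FileChannel channel = in.getChannel()) {
            long bufferSize = Math.min(channel.size(), DEFAULT_BUFFER_SIZE);
            // the mapping remains valid after the channel is closed
            return channel.map(FileChannel.MapMode.READ_ONLY, 0, bufferSize);
        }
    }

    public static void main(String[] args) throws IOException {
        MappedByteBuffer buff = mapLeadingBytes(args[0]);
        System.out.println("mapped " + buff.limit() + " bytes");
    }
}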
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java
new file mode 100644
index 00000000000..9221a6ca679
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractor.java
@@ -0,0 +1,192 @@
+package edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.netcdf;
+
+import edu.harvard.iq.dataverse.DatasetFieldConstant;
+import edu.harvard.iq.dataverse.ingest.metadataextraction.FileMetadataExtractor;
+import edu.harvard.iq.dataverse.ingest.metadataextraction.FileMetadataIngest;
+import edu.harvard.iq.dataverse.ingest.metadataextraction.spi.FileMetadataExtractorSpi;
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Logger;
+import ucar.ma2.DataType;
+import ucar.nc2.Attribute;
+import ucar.nc2.NetcdfFile;
+import ucar.nc2.NetcdfFiles;
+
+public class NetcdfFileMetadataExtractor extends FileMetadataExtractor {
+
+    private static final Logger logger = Logger.getLogger(NetcdfFileMetadataExtractor.class.getCanonicalName());
+
+    public static final String WEST_LONGITUDE_KEY = "geospatial_lon_min";
+    public static final String EAST_LONGITUDE_KEY = "geospatial_lon_max";
+    public static final String NORTH_LATITUDE_KEY = "geospatial_lat_max";
+    public static final String SOUTH_LATITUDE_KEY = "geospatial_lat_min";
+
+    private static final String GEOSPATIAL_BLOCK_NAME = "geospatial";
+    private static final String WEST_LONGITUDE = DatasetFieldConstant.westLongitude;
+    private static final String EAST_LONGITUDE = DatasetFieldConstant.eastLongitude;
+    private static final String NORTH_LATITUDE = DatasetFieldConstant.northLatitude;
+    private static final String SOUTH_LATITUDE = DatasetFieldConstant.southLatitude;
+
+    public NetcdfFileMetadataExtractor(FileMetadataExtractorSpi originatingProvider) {
+        super(originatingProvider);
+    }
+
+    public NetcdfFileMetadataExtractor() {
+        super(null);
+    }
+
+    @Override
+    public FileMetadataIngest ingest(BufferedInputStream stream) throws IOException {
+        throw new UnsupportedOperationException("Not supported yet.");
+    }
+
+    public FileMetadataIngest ingestFile(File file) throws IOException {
+        FileMetadataIngest fileMetadataIngest = new FileMetadataIngest();
+        fileMetadataIngest.setMetadataBlockName(GEOSPATIAL_BLOCK_NAME);
+
+        Map<String, String> geoFields = parseGeospatial(getNetcdfFile(file));
+        WestAndEastLongitude welong = getStandardLongitude(new WestAndEastLongitude(geoFields.get(WEST_LONGITUDE), geoFields.get(EAST_LONGITUDE)));
+        String westLongitudeFinal = welong != null ? welong.getWestLongitude() : null;
+        String eastLongitudeFinal = welong != null ? welong.getEastLongitude() : null;
+        String northLatitudeFinal = geoFields.get(NORTH_LATITUDE);
+        String southLatitudeFinal = geoFields.get(SOUTH_LATITUDE);
+
+        logger.fine(getLineStringsUrl(westLongitudeFinal, southLatitudeFinal, eastLongitudeFinal, northLatitudeFinal));
+
+        Map<String, Set<String>> metadataMap = new HashMap<>();
+        metadataMap.put(WEST_LONGITUDE, new HashSet<>());
+        metadataMap.get(WEST_LONGITUDE).add(westLongitudeFinal);
+        metadataMap.put(EAST_LONGITUDE, new HashSet<>());
+        metadataMap.get(EAST_LONGITUDE).add(eastLongitudeFinal);
+        metadataMap.put(NORTH_LATITUDE, new HashSet<>());
+        metadataMap.get(NORTH_LATITUDE).add(northLatitudeFinal);
+        metadataMap.put(SOUTH_LATITUDE, new HashSet<>());
+        metadataMap.get(SOUTH_LATITUDE).add(southLatitudeFinal);
+        fileMetadataIngest.setMetadataMap(metadataMap);
+        return fileMetadataIngest;
+    }
+
+    public NetcdfFile getNetcdfFile(File file) throws IOException {
+        /**
+         * <attribute name="geospatial_lat_min" value="25.066666666666666" />
+         * south
+         * <attribute name="geospatial_lat_max" value="49.40000000000000" />
+         * north
+         * <attribute name="geospatial_lon_min" value="-124.7666666333333" />
+         * west
+         * <attribute name="geospatial_lon_max" value="-67.058333300000015" />
+         * east
+         * <attribute name="geospatial_lon_resolution" value="0.041666666666666" />
+         * <attribute name="geospatial_lat_resolution" value="0.041666666666666" />
+         * <attribute name="geospatial_lat_units" value="decimal_degrees north" />
+         * <attribute name="geospatial_lon_units" value="decimal_degrees east" />
+         */
+        return NetcdfFiles.open(file.getAbsolutePath());
+    }
+
+    private Map<String, String> parseGeospatial(NetcdfFile netcdfFile) {
+        Map<String, String> geoFields = new HashMap<>();
+
+        Attribute westLongitude = netcdfFile.findGlobalAttribute(WEST_LONGITUDE_KEY);
+        Attribute eastLongitude = netcdfFile.findGlobalAttribute(EAST_LONGITUDE_KEY);
+        Attribute northLatitude = netcdfFile.findGlobalAttribute(NORTH_LATITUDE_KEY);
+        Attribute southLatitude = netcdfFile.findGlobalAttribute(SOUTH_LATITUDE_KEY);
+
+        geoFields.put(DatasetFieldConstant.westLongitude, getValue(westLongitude));
+        geoFields.put(DatasetFieldConstant.eastLongitude, getValue(eastLongitude));
+        geoFields.put(DatasetFieldConstant.northLatitude, getValue(northLatitude));
+        geoFields.put(DatasetFieldConstant.southLatitude, getValue(southLatitude));
+
+        logger.fine(getLineStringsUrl(
+                geoFields.get(DatasetFieldConstant.westLongitude),
+                geoFields.get(DatasetFieldConstant.southLatitude),
+                geoFields.get(DatasetFieldConstant.eastLongitude),
+                geoFields.get(DatasetFieldConstant.northLatitude)));
+
+        return geoFields;
+    }
+
+    // We store strings in the database.
+    private String getValue(Attribute attribute) {
+        if (attribute == null) {
+            return null;
+        }
+        DataType dataType = attribute.getDataType();
+        if (dataType.isString()) {
+            return attribute.getStringValue();
+        } else if (dataType.isNumeric()) {
+            return attribute.getNumericValue().toString();
+        } else {
+            return null;
+        }
+    }
+
+    // Convert to standard -180 to 180 range by subtracting 360
+    // if both longitudes are greater than 180. For example:
+    //       west     south      east      north
+    //     343.68,     41.8,   353.78,     49.62 becomes
+    //     -16.320007, 41.8,    -6.220001, 49.62 instead
+    // "If one of them is > 180, the domain is 0:360.
+    // If one of them is <0, the domain is -180:180.
+    // If both are between 0 and 180, the answer is indeterminate."
+    // https://github.com/cf-convention/cf-conventions/issues/435#issuecomment-1505614364
+    // Solr only wants -180 to 180. It will throw an error for values outside this range.
+    public WestAndEastLongitude getStandardLongitude(WestAndEastLongitude westAndEastLongitude) {
+        if (westAndEastLongitude == null) {
+            return null;
+        }
+        if (westAndEastLongitude.getWestLongitude() == null || westAndEastLongitude.getEastLongitude() == null) {
+            return null;
+        }
+        float eastAsFloat;
+        float westAsFloat;
+        try {
+            westAsFloat = Float.valueOf(westAndEastLongitude.getWestLongitude());
+            eastAsFloat = Float.valueOf(westAndEastLongitude.getEastLongitude());
+        } catch (NumberFormatException ex) {
+            return null;
+        }
+        // "If one of them is > 180, the domain is 0:360"
+        if (westAsFloat > 180 && eastAsFloat > 180) {
+            Float westStandard = westAsFloat - 360;
+            Float eastStandard = eastAsFloat - 360;
+            WestAndEastLongitude updatedWeLong = new WestAndEastLongitude(westStandard.toString(), eastStandard.toString());
+            return updatedWeLong;
+        }
+        // "If one of them is <0, the domain is -180:180."
+        // -180:180 is what Solr wants. Return it.
+        if (westAsFloat < 0 || eastAsFloat < 0) {
+            // BUT! Don't return it if the values
+            // are so low to be out of range!
+            // Something must be wrong with the data.
+            if (westAsFloat < -180 || eastAsFloat < -180) {
+                return null;
+            }
+            if (westAsFloat > 180 || eastAsFloat > 180) {
+                // Not in the proper range of -180:180
+                return null;
+            }
+            return westAndEastLongitude;
+        }
+        if ((westAsFloat > 180 || eastAsFloat > 180) && (westAsFloat < 180 || eastAsFloat < 180)) {
+            // One value is over 180 and the other is under 180.
+            // We don't know if we should subtract 360 or not.
+            // Return null to prevent inserting a potentially
+            // incorrect bounding box.
+            return null;
+        }
+        return westAndEastLongitude;
+    }
+
+    // Generates a handy link to see what the bounding box looks like on a map
+    private String getLineStringsUrl(String west, String south, String east, String north) {
+        // BBOX (Left (LON) ,Bottom (LAT), Right (LON), Top (LAT), comma separated, with or without decimal point):
+        return "https://linestrings.com/bbox/#" + west + "," + south + "," + east + "," + north;
+    }
+
+}
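
A minimal usage sketch for the new extractor, assuming direct construction and a local NetCDF file passed on the command line; the longitude pair is the 0:360 example from the getStandardLongitude() comment above.

import edu.harvard.iq.dataverse.ingest.metadataextraction.FileMetadataIngest;
import edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.netcdf.NetcdfFileMetadataExtractor;
import edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.netcdf.WestAndEastLongitude;
import java.io.File;
import java.io.IOException;

// Illustrative sketch only: exercise the longitude normalization and run the
// extractor against a local .nc file supplied as the first argument.
public class NetcdfExtractorSketch {
    public static void main(String[] args) throws IOException {
        NetcdfFileMetadataExtractor extractor = new NetcdfFileMetadataExtractor();

        // a 0:360 pair gets shifted by -360 into Solr's -180:180 range
        WestAndEastLongitude normalized = extractor.getStandardLongitude(
                new WestAndEastLongitude("343.68", "353.78"));
        System.out.println(normalized); // west ~ -16.32, east ~ -6.22

        // the returned FileMetadataIngest carries the bounding box fields
        // for the "geospatial" metadata block
        FileMetadataIngest geospatial = extractor.ingestFile(new File(args[0]));
    }
}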
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/WestAndEastLongitude.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/WestAndEastLongitude.java
new file mode 100644
index 00000000000..02a984f3424
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/WestAndEastLongitude.java
@@ -0,0 +1,52 @@
+package edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.netcdf;
+
+import java.util.Objects;
+
+public class WestAndEastLongitude {
+
+    private final String westLongitude;
+    private final String eastLongitude;
+
+    public WestAndEastLongitude(String westLongitude, String eastLongitude) {
+        this.westLongitude = westLongitude;
+        this.eastLongitude = eastLongitude;
+    }
+
+    public String getWestLongitude() {
+        return westLongitude;
+    }
+
+    public String getEastLongitude() {
+        return eastLongitude;
+    }
+
+    @Override
+    public String toString() {
+        return "WestAndEastLongitude{" + "westLongitude=" + westLongitude + ", eastLongitude=" + eastLongitude + '}';
+    }
+
+    @Override
+    public int hashCode() {
+        int hash = 3;
+        return hash;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        final WestAndEastLongitude other = (WestAndEastLongitude) obj;
+        if (!Objects.equals(this.westLongitude, other.westLongitude)) {
+            return false;
+        }
+        return Objects.equals(this.eastLongitude, other.eastLongitude);
+    }
+
+}
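
The constant hashCode() above is legal (it never disagrees with equals()), but it places every instance in the same hash bucket. If these objects were ever used as HashMap or HashSet keys, a field-based hash such as the sketch below would behave better; this is only an alternative, not something the change above does.

import java.util.Objects;

// Alternative sketch only: a field-based hashCode that stays consistent with
// an equals() comparing both longitude strings, as in the class above.
class LongitudePairHashSketch {
    private final String westLongitude;
    private final String eastLongitude;

    LongitudePairHashSketch(String westLongitude, String eastLongitude) {
        this.westLongitude = westLongitude;
        this.eastLongitude = eastLongitude;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof LongitudePairHashSketch)) {
            return false;
        }
        LongitudePairHashSketch other = (LongitudePairHashSketch) obj;
        return Objects.equals(westLongitude, other.westLongitude)
                && Objects.equals(eastLongitude, other.eastLongitude);
    }

    @Override
    public int hashCode() {
        // equal pairs hash equally; distinct pairs usually spread across buckets
        return Objects.hash(westLongitude, eastLongitude);
    }
}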
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java
index ab8f610cb06..a30dfafe67f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/metadataextraction/spi/FileMetadataExtractorSpi.java
@@ -10,9 +10,7 @@
 import java.util.logging.*;
 import java.io.*;
 
-import edu.harvard.iq.dataverse.ingest.plugin.spi.RegisterableService;
 import edu.harvard.iq.dataverse.ingest.plugin.spi.IngestServiceProvider;
-import edu.harvard.iq.dataverse.ingest.plugin.spi.ServiceRegistry;
 import java.nio.MappedByteBuffer;
 import java.util.Locale;
 
@@ -44,22 +42,6 @@ public FileMetadataExtractorSpi(String vendorName, String version) {
         this.version = version;
     }
 
-    public void onRegistration(ServiceRegistry registry,
-                               Class<?> category) {}
-                               
-    
-    public void onDeregistration(ServiceRegistry registry,
-                                 Class<?> category) {}
-    
-    public String getVersion() {
-        return version;
-    }
-
-    public String getVendorName() {
-        return vendorName;
-    }
-
-    
     public abstract String getDescription(Locale locale);
     
     protected String[] names = null;
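
The onRegistration()/onDeregistration() callbacks are removed here, and the custom ServiceRegistry/RegisterableService machinery is deleted below; that registry's lookupProviders() was already a thin wrapper around java.util.ServiceLoader. A minimal sketch of discovering extractor SPIs with ServiceLoader directly, assuming implementations are declared under META-INF/services/:

import edu.harvard.iq.dataverse.ingest.metadataextraction.spi.FileMetadataExtractorSpi;
import java.util.ServiceLoader;

// Illustrative sketch only: enumerate FileMetadataExtractorSpi implementations
// declared in META-INF/services/ on the classpath.
public class SpiLookupSketch {
    public static void main(String[] args) {
        ServiceLoader<FileMetadataExtractorSpi> providers =
                ServiceLoader.load(FileMetadataExtractorSpi.class);
        for (FileMetadataExtractorSpi spi : providers) {
            System.out.println("found provider: " + spi.getClass().getName());
        }
    }
}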
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java
deleted file mode 100644
index 4db48b5c06a..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/DigraphNode.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
-   Copyright (C) 2005-2012, by the President and Fellows of Harvard College.
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-         http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-   Dataverse Network - A web application to share, preserve and analyze research data.
-   Developed at the Institute for Quantitative Social Science, Harvard University.
-   Version 3.0.
-*/
-
-package edu.harvard.iq.dataverse.ingest.plugin.spi;
-
-// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz
-// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz
-// downloaded: 2009-05-07
-
-
-/*
- * Copyright 2000 Sun Microsystems, Inc.  All Rights Reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Sun designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Sun in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
- * CA 95054 USA or visit www.sun.com if you need additional information or
- * have any questions.
- */
-
-//package javax.imageio.spi;
-
-import java.io.Serializable;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Set;
-
-/**
- * A node in a directed graph.  In addition to an arbitrary
- * <code>Object</code> containing user data associated with the node,
- * each node maintains a <code>Set</code>s of nodes which are pointed
- * to by the current node (available from <code>getOutNodes</code>).
- * The in-degree of the node (that is, number of nodes that point to
- * the current node) may be queried.
- *
- */
-class DigraphNode implements Cloneable, Serializable {
-
-    /** The data associated with this node. */
-    protected Object data;
-
-    /**
-     * A <code>Set</code> of neighboring nodes pointed to by this
-     * node.
-     */
-    protected Set outNodes = new HashSet();
-
-    /** The in-degree of the node. */
-    protected int inDegree = 0;
-
-    /**
-     * A <code>Set</code> of neighboring nodes that point to this
-     * node.
-     */
-    private Set inNodes = new HashSet();
-
-    public DigraphNode(Object data) {
-        this.data = data;
-    }
-
-    /** Returns the <code>Object</code> referenced by this node. */
-    public Object getData() {
-        return data;
-    }
-
-    /**
-     * Returns an <code>Iterator</code> containing the nodes pointed
-     * to by this node.
-     */
-    public Iterator getOutNodes() {
-        return outNodes.iterator();
-    }
-
-    /**
-     * Adds a directed edge to the graph.  The outNodes list of this
-     * node is updated and the in-degree of the other node is incremented.
-     *
-     * @param node a <code>DigraphNode</code>.
-     *
-     * @return <code>true</code> if the node was not previously the
-     * target of an edge.
-     */
-    public boolean addEdge(DigraphNode node) {
-        if (outNodes.contains(node)) {
-            return false;
-        }
-
-        outNodes.add(node);
-        node.inNodes.add(this);
-        node.incrementInDegree();
-        return true;
-    }
-
-    /**
-     * Returns <code>true</code> if an edge exists between this node
-     * and the given node.
-     *
-     * @param node a <code>DigraphNode</code>.
-     *
-     * @return <code>true</code> if the node is the target of an edge.
-     */
-    public boolean hasEdge(DigraphNode node) {
-        return outNodes.contains(node);
-    }
-
-    /**
-     * Removes a directed edge from the graph.  The outNodes list of this
-     * node is updated and the in-degree of the other node is decremented.
-     *
-     * @return <code>true</code> if the node was previously the target
-     * of an edge.
-     */
-    public boolean removeEdge(DigraphNode node) {
-        if (!outNodes.contains(node)) {
-            return false;
-        }
-
-        outNodes.remove(node);
-        node.inNodes.remove(this);
-        node.decrementInDegree();
-        return true;
-    }
-
-    /**
-     * Removes this node from the graph, updating neighboring nodes
-     * appropriately.
-     */
-    public void dispose() {
-        Object[] inNodesArray = inNodes.toArray();
-        for(int i=0; i<inNodesArray.length; i++) {
-            DigraphNode node = (DigraphNode) inNodesArray[i];
-            node.removeEdge(this);
-        }
-
-        Object[] outNodesArray = outNodes.toArray();
-        for(int i=0; i<outNodesArray.length; i++) {
-            DigraphNode node = (DigraphNode) outNodesArray[i];
-            removeEdge(node);
-        }
-    }
-
-    /** Returns the in-degree of this node. */
-    public int getInDegree() {
-        return inDegree;
-    }
-
-    /** Increments the in-degree of this node. */
-    private void incrementInDegree() {
-        ++inDegree;
-    }
-
-    /** Decrements the in-degree of this node. */
-    private void decrementInDegree() {
-        --inDegree;
-    }
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/IngestServiceProvider.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/IngestServiceProvider.java
index c327ecb2da1..c792156d648 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/IngestServiceProvider.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/IngestServiceProvider.java
@@ -29,7 +29,7 @@
  * 
  * @author akio sone at UNC-Odum
  */
-public abstract class IngestServiceProvider  implements RegisterableService {
+public abstract class IngestServiceProvider {
 
     /**
      * The name of the vendor that is responsible for coding this
@@ -62,29 +62,6 @@ public IngestServiceProvider(String vendorName, String version) {
     public IngestServiceProvider() {
     }
 
-    /**
-     * A callback to be called exactly once after this Spi class
-     * has been instantiated and registered in a 
-     * <code>ServiceRegistry</code>.
-     *
-     * @param registry the ServiceRegistry instance.
-     * @param category a <code>Class</code>  object that indicatges
-     * its registry category under which this object has been registered.
-     * category.
-     */
-    public void onRegistration(ServiceRegistry registry,
-                               Class<?> category) {}
-                               
-    /**
-     * A callback whenever this Spi class is deregistered from
-     * a <code>ServiceRegistry</code>.
-     *
-     * @param registry the ServiceRegistry instance.
-     * @param category a <code>Class</code> object that indicatges
-     * its registry category from which this object is being de-registered.
-     */
-    public void onDeregistration(ServiceRegistry registry,
-                                 Class<?> category) {}
     /**
      * Gets the value of the version field.
      *
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java
deleted file mode 100644
index 87f4f57cdb6..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/PartiallyOrderedSet.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/*
-   Copyright (C) 2005-2012, by the President and Fellows of Harvard College.
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-         http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-   Dataverse Network - A web application to share, preserve and analyze research data.
-   Developed at the Institute for Quantitative Social Science, Harvard University.
-   Version 3.0.
-*/
-
-package edu.harvard.iq.dataverse.ingest.plugin.spi;
-
-// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz
-// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz
-// downloaded: 2009-05-07
-
-
-/*
- * Copyright 2000 Sun Microsystems, Inc.  All Rights Reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Sun designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Sun in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
- * CA 95054 USA or visit www.sun.com if you need additional information or
- * have any questions.
- */
-
-//package javax.imageio.spi;
-
-import java.util.AbstractSet;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * A set of <code>Object</code>s with pairwise orderings between them.
- * The <code>iterator</code> method provides the elements in
- * topologically sorted order.  Elements participating in a cycle
- * are not returned.
- *
- * Unlike the <code>SortedSet</code> and <code>SortedMap</code>
- * interfaces, which require their elements to implement the
- * <code>Comparable</code> interface, this class receives ordering
- * information via its <code>setOrdering</code> and
- * <code>unsetPreference</code> methods.  This difference is due to
- * the fact that the relevant ordering between elements is unlikely to
- * be inherent in the elements themselves; rather, it is set
- * dynamically accoring to application policy.  For example, in a
- * service provider registry situation, an application might allow the
- * user to set a preference order for service provider objects
- * supplied by a trusted vendor over those supplied by another.
- *
- */
-class PartiallyOrderedSet extends AbstractSet {
-
-    // The topological sort (roughly) follows the algorithm described in
-    // Horowitz and Sahni, _Fundamentals of Data Structures_ (1976),
-    // p. 315.
-
-    // Maps Objects to DigraphNodes that contain them
-    private Map poNodes = new HashMap();
-
-    // The set of Objects
-    private Set nodes = poNodes.keySet();
-
-    /**
-     * Constructs a <code>PartiallyOrderedSet</code>.
-     */
-    public PartiallyOrderedSet() {}
-
-    public int size() {
-        return nodes.size();
-    }
-
-    public boolean contains(Object o) {
-        return nodes.contains(o);
-    }
-
-    /**
-     * Returns an iterator over the elements contained in this
-     * collection, with an ordering that respects the orderings set
-     * by the <code>setOrdering</code> method.
-     */
-    public Iterator iterator() {
-        return new PartialOrderIterator(poNodes.values().iterator());
-    }
-
-    /**
-     * Adds an <code>Object</code> to this
-     * <code>PartiallyOrderedSet</code>.
-     */
-    public boolean add(Object o) {
-        if (nodes.contains(o)) {
-            return false;
-        }
-
-        DigraphNode node = new DigraphNode(o);
-        poNodes.put(o, node);
-        return true;
-    }
-
-    /**
-     * Removes an <code>Object</code> from this
-     * <code>PartiallyOrderedSet</code>.
-     */
-    public boolean remove(Object o) {
-        DigraphNode node = (DigraphNode)poNodes.get(o);
-        if (node == null) {
-            return false;
-        }
-
-        poNodes.remove(o);
-        node.dispose();
-        return true;
-    }
-
-    public void clear() {
-        poNodes.clear();
-    }
-
-    /**
-     * Sets an ordering between two nodes.  When an iterator is
-     * requested, the first node will appear earlier in the
-     * sequence than the second node.  If a prior ordering existed
-     * between the nodes in the opposite order, it is removed.
-     *
-     * @return <code>true</code> if no prior ordering existed
-     * between the nodes, <code>false</code>otherwise.
-     */
-    public boolean setOrdering(Object first, Object second) {
-        DigraphNode firstPONode =
-            (DigraphNode)poNodes.get(first);
-        DigraphNode secondPONode =
-            (DigraphNode)poNodes.get(second);
-
-        secondPONode.removeEdge(firstPONode);
-        return firstPONode.addEdge(secondPONode);
-    }
-
-    /**
-     * Removes any ordering between two nodes.
-     *
-     * @return true if a prior prefence existed between the nodes.
-     */
-    public boolean unsetOrdering(Object first, Object second) {
-        DigraphNode firstPONode =
-            (DigraphNode)poNodes.get(first);
-        DigraphNode secondPONode =
-            (DigraphNode)poNodes.get(second);
-
-        return firstPONode.removeEdge(secondPONode) ||
-            secondPONode.removeEdge(firstPONode);
-    }
-
-    /**
-     * Returns <code>true</code> if an ordering exists between two
-     * nodes.
-     */
-    public boolean hasOrdering(Object preferred, Object other) {
-        DigraphNode preferredPONode =
-            (DigraphNode)poNodes.get(preferred);
-        DigraphNode otherPONode =
-            (DigraphNode)poNodes.get(other);
-
-        return preferredPONode.hasEdge(otherPONode);
-    }
-}
-
-class PartialOrderIterator implements Iterator {
-
-    LinkedList zeroList = new LinkedList();
-    Map inDegrees = new HashMap(); // DigraphNode -> Integer
-
-    public PartialOrderIterator(Iterator iter) {
-        // Initialize scratch in-degree values, zero list
-        while (iter.hasNext()) {
-            DigraphNode node = (DigraphNode)iter.next();
-            int inDegree = node.getInDegree();
-            inDegrees.put(node, new Integer(inDegree));
-
-            // Add nodes with zero in-degree to the zero list
-            if (inDegree == 0) {
-                zeroList.add(node);
-            }
-        }
-    }
-
-    public boolean hasNext() {
-        return !zeroList.isEmpty();
-    }
-
-    public Object next() {
-        DigraphNode first = (DigraphNode)zeroList.removeFirst();
-
-        // For each out node of the output node, decrement its in-degree
-        Iterator outNodes = first.getOutNodes();
-        while (outNodes.hasNext()) {
-            DigraphNode node = (DigraphNode)outNodes.next();
-            int inDegree = ((Integer)inDegrees.get(node)).intValue() - 1;
-            inDegrees.put(node, new Integer(inDegree));
-
-            // If the in-degree has fallen to 0, place the node on the list
-            if (inDegree == 0) {
-                zeroList.add(node);
-            }
-        }
-
-        return first.getData();
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java
deleted file mode 100644
index d3609b1e4b9..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/RegisterableService.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
-   Copyright (C) 2005-2012, by the President and Fellows of Harvard College.
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-         http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-   Dataverse Network - A web application to share, preserve and analyze research data.
-   Developed at the Institute for Quantitative Social Science, Harvard University.
-   Version 3.0.
-*/
-
-package edu.harvard.iq.dataverse.ingest.plugin.spi;
-
-// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz
-// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz
-// downloaded: 2009-05-07
-
-
-/*
- * Copyright 2000-2004 Sun Microsystems, Inc.  All Rights Reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Sun designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Sun in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
- * CA 95054 USA or visit www.sun.com if you need additional information or
- * have any questions.
- */
-
-
-/**
- * An optional interface that may be provided by service provider
- * objects that will be registered with a
- * <code>ServiceRegistry</code>.  If this interface is present,
- * notification of registration and deregistration will be performed.
- *
- * @see ServiceRegistry
- *
- */
-public interface RegisterableService {
-
-    /**
-     * Called when an object implementing this interface is added to
-     * the given <code>category</code> of the given
-     * <code>registry</code>.  The object may already be registered
-     * under another category or categories.
-     *
-     * @param registry a <code>ServiceRegistry</code> where this
-     * object has been registered.
-     * @param category a <code>Class</code> object indicating the
-     * registry category under which this object has been registered.
-     */
-    void onRegistration(ServiceRegistry registry, Class<?> category);
-
-    /**
-     * Called when an object implementing this interface is removed
-     * from the given <code>category</code> of the given
-     * <code>registry</code>.  The object may still be registered
-     * under another category or categories.
-     *
-     * @param registry a <code>ServiceRegistry</code> from which this
-     * object is being (wholly or partially) deregistered.
-     * @param category a <code>Class</code> object indicating the
-     * registry category from which this object is being deregistered.
-     */
-    void onDeregistration(ServiceRegistry registry, Class<?> category);
-}
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java b/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java
deleted file mode 100644
index 1794adb5de2..00000000000
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/plugin/spi/ServiceRegistry.java
+++ /dev/null
@@ -1,861 +0,0 @@
-/*
-   Copyright (C) 2005-2012, by the President and Fellows of Harvard College.
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-         http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-   Dataverse Network - A web application to share, preserve and analyze research data.
-   Developed at the Institute for Quantitative Social Science, Harvard University.
-   Version 3.0.
-*/
-package edu.harvard.iq.dataverse.ingest.plugin.spi;
-
-
-
-// This file was Taken out from openjdk-6-src-b16-24_apr_2009.tar.gz
-// http://download.java.net/openjdk/jdk6/promoted/b16/openjdk-6-src-b16-24_apr_2009.tar.gz
-// downloaded: 2009-05-07
-
-
-/*
- * Copyright 2000-2007 Sun Microsystems, Inc.  All Rights Reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation.  Sun designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Sun in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
- * CA 95054 USA or visit www.sun.com if you need additional information or
- * have any questions.
- */
-
-//package javax.imageio.spi;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.ServiceLoader;
-
-/**
- * A registry for service provider instances.
- *
- * <p> A <i>service</i> is a well-known set of interfaces and (usually
- * abstract) classes.  A <i>service provider</i> is a specific
- * implementation of a service.  The classes in a provider typically
- * implement the interface or subclass the class defined by the
- * service itself.
- *
- * <p> Service providers are stored in one or more <i>categories</i>,
- * each of which is defined by a class of interface (described by a
- * <code>Class</code> object) that all of its members must implement.
- * The set of categories may be changed dynamically.
- *
- * <p> Only a single instance of a given leaf class (that is, the
- * actual class returned by <code>getClass()</code>, as opposed to any
- * inherited classes or interfaces) may be registered.  That is,
- * suppose that the
- * <code>com.mycompany.mypkg.GreenServiceProvider</code> class
- * implements the <code>com.mycompany.mypkg.MyService</code>
- * interface.  If a <code>GreenServiceProvider</code> instance is
- * registered, it will be stored in the category defined by the
- * <code>MyService</code> class.  If a new instance of
- * <code>GreenServiceProvider</code> is registered, it will replace
- * the previous instance.  In practice, service provider objects are
- * usually singletons so this behavior is appropriate.
- *
- * <p> To declare a service provider, a <code>services</code>
- * subdirectory is placed within the <code>META-INF</code> directory
- * that is present in every JAR file.  This directory contains a file
- * for each service provider interface that has one or more
- * implementation classes present in the JAR file.  For example, if
- * the JAR file contained a class named
- * <code>com.mycompany.mypkg.MyServiceImpl</code> which implements the
- * <code>javax.someapi.SomeService</code> interface, the JAR file
- * would contain a file named: <pre>
- * META-INF/services/javax.someapi.SomeService </pre>
- *
- * containing the line:
- *
- * <pre>
- * com.mycompany.mypkg.MyService
- * </pre>
- *
- * <p> The service provider classes should be to be lightweight and
- * quick to load.  Implementations of these interfaces should avoid
- * complex dependencies on other classes and on native code. The usual
- * pattern for more complex services is to register a lightweight
- * proxy for the heavyweight service.
- *
- * <p> An application may customize the contents of a registry as it
- * sees fit, so long as it has the appropriate runtime permission.
- *
- * <p> For more details on declaring service providers, and the JAR
- * format in general, see the <a
- * href="../../../../technotes/guides/jar/jar.html">
- * JAR File Specification</a>.
- *
- * @see RegisterableService
- *
- */
-public class ServiceRegistry {
-
-    // Class -> Registry
-    private Map<Class<?>, SubRegistry> categoryMap = new HashMap<>();
-
-    /**
-     * Constructs a <code>ServiceRegistry</code> instance with a
-     * set of categories taken from the <code>categories</code>
-     * argument.
-     *
-     * @param categories an <code>Iterator</code> containing
-     * <code>Class</code> objects to be used to define categories.
-     *
-     * @exception IllegalArgumentException if
-     * <code>categories</code> is <code>null</code>.
-     */
-    public ServiceRegistry(Iterator<Class<?>> categories) {
-        if (categories == null) {
-            throw new IllegalArgumentException("categories == null!");
-        }
-        while (categories.hasNext()) {
-            Class<?> category = categories.next();
-            SubRegistry reg = new SubRegistry(this, category);
-            categoryMap.put(category, reg);
-        }
-    }
-
-    // The following two methods expose functionality from
-    // sun.misc.Service.  If that class is made public, they may be
-    // removed.
-    //
-    // The sun.misc.ServiceConfigurationError class may also be
-    // exposed, in which case the references to 'an
-    // <code>Error</code>' below should be changed to 'a
-    // <code>ServiceConfigurationError</code>'.
-
-    /**
-     * Searches for implementations of a particular service class
-     * using the given class loader.
-     *
-     * <p> This method transforms the name of the given service class
-     * into a provider-configuration filename as described in the
-     * class comment and then uses the <code>getResources</code>
-     * method of the given class loader to find all available files
-     * with that name.  These files are then read and parsed to
-     * produce a list of provider-class names.  The iterator that is
-     * returned uses the given class loader to look up and then
-     * instantiate each element of the list.
-     *
-     * <p> Because it is possible for extensions to be installed into
-     * a running Java virtual machine, this method may return
-     * different results each time it is invoked.
-     *
-     * @param providerClass a <code>Class</code>object indicating the
-     * class or interface of the service providers being detected.
-     *
-     * @param loader the class loader to be used to load
-     * provider-configuration files and instantiate provider classes,
-     * or <code>null</code> if the system class loader (or, failing that
-     * the bootstrap class loader) is to be used.
-     *
-     * @return An <code>Iterator</code> that yields provider objects
-     * for the given service, in some arbitrary order.  The iterator
-     * will throw an <code>Error</code> if a provider-configuration
-     * file violates the specified format or if a provider class
-     * cannot be found and instantiated.
-     *
-     * @exception IllegalArgumentException if
-     * <code>providerClass</code> is <code>null</code>.
-     */
-    public static <T> Iterator<T> lookupProviders(Class<T> providerClass,
-                                                  ClassLoader loader)
-    {
-        if (providerClass == null) {
-            throw new IllegalArgumentException("providerClass == null!");
-        }
-        return ServiceLoader.load(providerClass, loader).iterator();
-    }
-
-    /**
-     * Locates and incrementally instantiates the available providers
-     * of a given service using the context class loader.  This
-     * convenience method is equivalent to:
-     *
-     * <pre>
-     *   ClassLoader cl = Thread.currentThread().getContextClassLoader();
-     *   return Service.providers(service, cl);
-     * </pre>
-     *
-     * @param providerClass a <code>Class</code>object indicating the
-     * class or interface of the service providers being detected.
-     *
-     * @return An <code>Iterator</code> that yields provider objects
-     * for the given service, in some arbitrary order.  The iterator
-     * will throw an <code>Error</code> if a provider-configuration
-     * file violates the specified format or if a provider class
-     * cannot be found and instantiated.
-     *
-     * @exception IllegalArgumentException if
-     * <code>providerClass</code> is <code>null</code>.
-     */
-    public static <T> Iterator<T> lookupProviders(Class<T> providerClass) {
-        if (providerClass == null) {
-            throw new IllegalArgumentException("providerClass == null!");
-        }
-        return ServiceLoader.load(providerClass).iterator();
-    }
-
-    /**
-     * Returns an <code>Iterator</code> of <code>Class</code> objects
-     * indicating the current set of categories.  The iterator will be
-     * empty if no categories exist.
-     *
-     * @return an <code>Iterator</code> containing
-     * <code>Class</code>objects.
-     */
-    public Iterator<Class<?>> getCategories() {
-        Set<Class<?>> keySet = categoryMap.keySet();
-        return keySet.iterator();
-    }
-
-    /**
-     * Returns an Iterator containing the subregistries to which the
-     * provider belongs.
-     */
-    private Iterator<SubRegistry> getSubRegistries(Object provider) {
-        List<SubRegistry> l = new ArrayList<>();
-        Iterator<Class<?>> iter = categoryMap.keySet().iterator();
-        while (iter.hasNext()) {
-            Class<?> c = iter.next();
-            if (c.isAssignableFrom(provider.getClass())) {
-                l.add(categoryMap.get(c));
-            }
-        }
-        return l.iterator();
-    }
-
-    /**
-     * Adds a service provider object to the registry.  The provider
-     * is associated with the given category.
-     *
-     * <p> If <code>provider</code> implements the
-     * <code>RegisterableService</code> interface, its
-     * <code>onRegistration</code> method will be called.  Its
-     * <code>onDeregistration</code> method will be called each time
-     * it is deregistered from a category, for example if a
-     * category is removed or the registry is garbage collected.
-     *
-     * @param provider the service provide object to be registered.
-     * @param category the category under which to register the
-     * provider.
-     *
-     * @return true if no provider of the same class was previously
-     * registered in the same category category.
-     *
-     * @exception IllegalArgumentException if <code>provider</code> is
-     * <code>null</code>.
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     * @exception ClassCastException if provider does not implement
-     * the <code>Class</code> defined by <code>category</code>.
-     */
-    public <T> boolean registerServiceProvider(T provider,
-                                               Class<T> category) {
-        if (provider == null) {
-            throw new IllegalArgumentException("provider == null!");
-        }
-        SubRegistry reg = categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        if (!category.isAssignableFrom(provider.getClass())) {
-            throw new ClassCastException();
-        }
-
-        return reg.registerServiceProvider(provider);
-    }
-
-    /**
-     * Adds a service provider object to the registry.  The provider
-     * is associated within each category present in the registry
-     * whose <code>Class</code> it implements.
-     *
-     * <p> If <code>provider</code> implements the
-     * <code>RegisterableService</code> interface, its
-     * <code>onRegistration</code> method will be called once for each
-     * category it is registered under.  Its
-     * <code>onDeregistration</code> method will be called each time
-     * it is deregistered from a category or when the registry is
-     * finalized.
-     *
-     * @param provider the service provider object to be registered.
-     *
-     * @exception IllegalArgumentException if
-     * <code>provider</code> is <code>null</code>.
-     */
-    public void registerServiceProvider(Object provider) {
-        if (provider == null) {
-            throw new IllegalArgumentException("provider == null!");
-        }
-        Iterator<SubRegistry> regs = getSubRegistries(provider);
-        while (regs.hasNext()) {
-            SubRegistry reg = regs.next();
-            reg.registerServiceProvider(provider);
-        }
-    }
-
-    /**
-     * Adds a set of service provider objects, taken from an
-     * <code>Iterator</code> to the registry.  Each provider is
-     * associated within each category present in the registry whose
-     * <code>Class</code> it implements.
-     *
-     * <p> For each entry of <code>providers</code> that implements
-     * the <code>RegisterableService</code> interface, its
-     * <code>onRegistration</code> method will be called once for each
-     * category it is registered under.  Its
-     * <code>onDeregistration</code> method will be called each time
-     * it is deregistered from a category or when the registry is
-     * finalized.
-     *
-     * @param providers an Iterator containing service provider
-     * objects to be registered.
-     *
-     * @exception IllegalArgumentException if <code>providers</code>
-     * is <code>null</code> or contains a <code>null</code> entry.
-     */
-    public void registerServiceProviders(Iterator<?> providers) {
-        if (providers == null) {
-            throw new IllegalArgumentException("provider == null!");
-        }
-        while (providers.hasNext()) {
-            registerServiceProvider(providers.next());
-        }
-    }
-
-    /**
-     * Removes a service provider object from the given category.  If
-     * the provider was not previously registered, nothing happens and
-     * <code>false</code> is returned.  Otherwise, <code>true</code>
-     * is returned.  If an object of the same class as
-     * <code>provider</code> but not equal (using <code>==</code>) to
-     * <code>provider</code> is registered, it will not be
-     * deregistered.
-     *
-     * <p> If <code>provider</code> implements the
-     * <code>RegisterableService</code> interface, its
-     * <code>onDeregistration</code> method will be called.
-     *
-     * @param provider the service provider object to be deregistered.
-     * @param category the category from which to deregister the
-     * provider.
-     *
-     * @return <code>true</code> if the provider was previously
-     * registered in the same category category,
-     * <code>false</code> otherwise.
-     *
-     * @exception IllegalArgumentException if <code>provider</code> is
-     * <code>null</code>.
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     * @exception ClassCastException if provider does not implement
-     * the class defined by <code>category</code>.
-     */
-    public <T> boolean deregisterServiceProvider(T provider,
-                                                 Class<T> category) {
-        if (provider == null) {
-            throw new IllegalArgumentException("provider == null!");
-        }
-        SubRegistry reg = categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        if (!category.isAssignableFrom(provider.getClass())) {
-            throw new ClassCastException();
-        }
-        return reg.deregisterServiceProvider(provider);
-    }
-
-    /**
-     * Removes a service provider object from all categories that
-     * contain it.
-     *
-     * @param provider the service provider object to be deregistered.
-     *
-     * @exception IllegalArgumentException if <code>provider</code> is
-     * <code>null</code>.
-     */
-    public void deregisterServiceProvider(Object provider) {
-        if (provider == null) {
-            throw new IllegalArgumentException("provider == null!");
-        }
-        Iterator<SubRegistry> regs = getSubRegistries(provider);
-        while (regs.hasNext()) {
-            SubRegistry reg = regs.next();
-            reg.deregisterServiceProvider(provider);
-        }
-    }
-
-    /**
-     * Returns <code>true</code> if <code>provider</code> is currently
-     * registered.
-     *
-     * @param provider the service provider object to be queried.
-     *
-     * @return <code>true</code> if the given provider has been
-     * registered.
-     *
-     * @exception IllegalArgumentException if <code>provider</code> is
-     * <code>null</code>.
-     */
-    public boolean contains(Object provider) {
-        if (provider == null) {
-            throw new IllegalArgumentException("provider == null!");
-        }
-        Iterator<SubRegistry> regs = getSubRegistries(provider);
-        while (regs.hasNext()) {
-            SubRegistry reg = regs.next();
-            if (reg.contains(provider)) {
-                return true;
-            }
-        }
-
-        return false;
-    }
-
-    /**
-     * Returns an <code>Iterator</code> containing all registered
-     * service providers in the given category.  If
-     * <code>useOrdering</code> is <code>false</code>, the iterator
-     * will return all of the server provider objects in an arbitrary
-     * order.  Otherwise, the ordering will respect any pairwise
-     * orderings that have been set.  If the graph of pairwise
-     * orderings contains cycles, any providers that belong to a cycle
-     * will not be returned.
-     *
-     * @param category the category to be retrieved from.
-     * @param useOrdering <code>true</code> if pairwise orderings
-     * should be taken account in ordering the returned objects.
-     *
-     * @return an <code>Iterator</code> containing service provider
-     * objects from the given category, possibly in order.
-     *
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     */
-    public <T> Iterator<T> getServiceProviders(Class<T> category,
-                                               boolean useOrdering) {
-        SubRegistry reg = categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        return reg.getServiceProviders(useOrdering);
-    }
-
-    /**
-     * A simple filter interface used by
-     * <code>ServiceRegistry.getServiceProviders</code> to select
-     * providers matching an arbitrary criterion.  Classes that
-     * implement this interface should be defined in order to make use
-     * of the <code>getServiceProviders</code> method of
-     * <code>ServiceRegistry</code> that takes a <code>Filter</code>.
-     *
-     * @see ServiceRegistry#getServiceProviders(Class, ServiceRegistry.Filter, boolean)
-     */
-    public interface Filter {
-
-        /**
-         * Returns <code>true</code> if the given
-         * <code>provider</code> object matches the criterion defined
-         * by this <code>Filter</code>.
-         *
-         * @param provider a service provider <code>Object</code>.
-         *
-         * @return true if the provider matches the criterion.
-         */
-        boolean filter(Object provider);
-    }
-
-    /**
-     * Returns an <code>Iterator</code> containing service provider
-     * objects within a given category that satisfy a criterion
-     * imposed by the supplied <code>ServiceRegistry.Filter</code>
-     * object's <code>filter</code> method.
-     *
-     * <p> The <code>useOrdering</code> argument controls the
-     * ordering of the results using the same rules as
-     * <code>getServiceProviders(Class, boolean)</code>.
-     *
-     * @param category the category to be retrieved from.
-     * @param filter an instance of <code>ServiceRegistry.Filter</code>
-     * whose <code>filter</code> method will be invoked.
-     * @param useOrdering <code>true</code> if pairwise orderings
-     * should be taken account in ordering the returned objects.
-     *
-     * @return an <code>Iterator</code> containing service provider
-     * objects from the given category, possibly in order.
-     *
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     */
-    public <T> Iterator<T> getServiceProviders(Class<T> category,
-                                               Filter filter,
-                                               boolean useOrdering) {
-        SubRegistry reg = categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        Iterator iter = getServiceProviders(category, useOrdering);
-        return new FilterIterator(iter, filter);
-    }
-
-    /**
-     * Returns the currently registered service provider object that
-     * is of the given class type.  At most one object of a given
-     * class is allowed to be registered at any given time.  If no
-     * registered object has the desired class type, <code>null</code>
-     * is returned.
-     *
-     * @param providerClass the <code>Class</code> of the desired
-     * service provider object.
-     *
-     * @return a currently registered service provider object with the
-     * desired <code>Class</code>type, or <code>null</code> is none is
-     * present.
-     *
-     * @exception IllegalArgumentException if <code>providerClass</code> is
-     * <code>null</code>.
-     */
-    public <T> T getServiceProviderByClass(Class<T> providerClass) {
-        if (providerClass == null) {
-            throw new IllegalArgumentException("providerClass == null!");
-        }
-        for (Class c : categoryMap.keySet()) {
-            if (c.isAssignableFrom(providerClass)) {
-                SubRegistry reg = (SubRegistry)categoryMap.get(c);
-                T provider = reg.getServiceProviderByClass(providerClass);
-                if (provider != null) {
-                    return provider;
-                }
-            }
-        }
-        return null;
-    }
-
-    /**
-     * Sets a pairwise ordering between two service provider objects
-     * within a given category.  If one or both objects are not
-     * currently registered within the given category, or if the
-     * desired ordering is already set, nothing happens and
-     * <code>false</code> is returned.  If the providers previously
-     * were ordered in the reverse direction, that ordering is
-     * removed.
-     *
-     * <p> The ordering will be used by the
-     * <code>getServiceProviders</code> methods when their
-     * <code>useOrdering</code> argument is <code>true</code>.
-     *
-     * @param category a <code>Class</code> object indicating the
-     * category under which the preference is to be established.
-     * @param firstProvider the preferred provider.
-     * @param secondProvider the provider to which
-     * <code>firstProvider</code> is preferred.
-     *
-     * @return <code>true</code> if a previously unset ordering
-     * was established.
-     *
-     * @exception IllegalArgumentException if either provider is
-     * <code>null</code> or they are the same object.
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     */
-    public <T> boolean setOrdering(Class<T> category,
-                                   T firstProvider,
-                                   T secondProvider) {
-        if (firstProvider == null || secondProvider == null) {
-            throw new IllegalArgumentException("provider is null!");
-        }
-        if (firstProvider == secondProvider) {
-            throw new IllegalArgumentException("providers are the same!");
-        }
-        SubRegistry reg = (SubRegistry)categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        if (reg.contains(firstProvider) &&
-            reg.contains(secondProvider)) {
-            return reg.setOrdering(firstProvider, secondProvider);
-        }
-        return false;
-    }
-
-    /**
-     * Sets a pairwise ordering between two service provider objects
-     * within a given category.  If one or both objects are not
-     * currently registered within the given category, or if no
-     * ordering is currently set between them, nothing happens
-     * and <code>false</code> is returned.
-     *
-     * <p> The ordering will be used by the
-     * <code>getServiceProviders</code> methods when their
-     * <code>useOrdering</code> argument is <code>true</code>.
-     *
-     * @param category a <code>Class</code> object indicating the
-     * category under which the preference is to be disestablished.
-     * @param firstProvider the formerly preferred provider.
-     * @param secondProvider the provider to which
-     * <code>firstProvider</code> was formerly preferred.
-     *
-     * @return <code>true</code> if a previously set ordering was
-     * disestablished.
-     *
-     * @exception IllegalArgumentException if either provider is
-     * <code>null</code> or they are the same object.
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     */
-    public <T> boolean unsetOrdering(Class<T> category,
-                                     T firstProvider,
-                                     T secondProvider) {
-        if (firstProvider == null || secondProvider == null) {
-            throw new IllegalArgumentException("provider is null!");
-        }
-        if (firstProvider == secondProvider) {
-            throw new IllegalArgumentException("providers are the same!");
-        }
-        SubRegistry reg = (SubRegistry)categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        if (reg.contains(firstProvider) &&
-            reg.contains(secondProvider)) {
-            return reg.unsetOrdering(firstProvider, secondProvider);
-        }
-        return false;
-    }
-
-    /**
-     * Deregisters all service provider object currently registered
-     * under the given category.
-     *
-     * @param category the category to be emptied.
-     *
-     * @exception IllegalArgumentException if there is no category
-     * corresponding to <code>category</code>.
-     */
-    public void deregisterAll(Class<?> category) {
-        SubRegistry reg = (SubRegistry)categoryMap.get(category);
-        if (reg == null) {
-            throw new IllegalArgumentException("category unknown!");
-        }
-        reg.clear();
-    }
-
-    /**
-     * Deregisters all currently registered service providers from all
-     * categories.
-     */
-    public void deregisterAll() {
-        Iterator iter = categoryMap.values().iterator();
-        while (iter.hasNext()) {
-            SubRegistry reg = (SubRegistry)iter.next();
-            reg.clear();
-        }
-    }
-
-    /**
-     * Finalizes this object prior to garbage collection.  The
-     * <code>deregisterAll</code> method is called to deregister all
-     * currently registered service providers.  This method should not
-     * be called from application code.
-     *
-     * @exception Throwable if an error occurs during superclass
-     * finalization.
-     */
-    public void finalize() throws Throwable {
-        deregisterAll();
-        super.finalize();
-    }
-}
-
-
-/**
- * A portion of a registry dealing with a single superclass or
- * interface.
- */
-class SubRegistry {
-
-    ServiceRegistry registry;
-
-    Class<?> category;
-
-    // Provider Objects organized by partial oridering
-    PartiallyOrderedSet poset = new PartiallyOrderedSet();
-
-    // Class -> Provider Object of that class
-    Map<Class<?>,Object> map = new HashMap<>();
-
-    public SubRegistry(ServiceRegistry registry, Class<?> category) {
-        this.registry = registry;
-        this.category = category;
-    }
-
-    public boolean registerServiceProvider(Object provider) {
-        Object oprovider = map.get(provider.getClass());
-        boolean present =  oprovider != null;
-
-        if (present) {
-            deregisterServiceProvider(oprovider);
-        }
-        map.put(provider.getClass(), provider);
-        poset.add(provider);
-        if (provider instanceof RegisterableService) {
-            RegisterableService rs = (RegisterableService)provider;
-            rs.onRegistration(registry, category);
-        }
-
-        return !present;
-    }
-
-    /**
-     * If the provider was not previously registered, do nothing.
-     *
-     * @return true if the provider was previously registered.
-     */
-    public boolean deregisterServiceProvider(Object provider) {
-        Object oprovider = map.get(provider.getClass());
-
-        if (provider == oprovider) {
-            map.remove(provider.getClass());
-            poset.remove(provider);
-            if (provider instanceof RegisterableService) {
-                RegisterableService rs = (RegisterableService)provider;
-                rs.onDeregistration(registry, category);
-            }
-
-            return true;
-        }
-        return false;
-    }
-
-    public boolean contains(Object provider) {
-        Object oprovider = map.get(provider.getClass());
-        return oprovider == provider;
-    }
-
-    public boolean setOrdering(Object firstProvider,
-                               Object secondProvider) {
-        return poset.setOrdering(firstProvider, secondProvider);
-    }
-
-    public boolean unsetOrdering(Object firstProvider,
-                                 Object secondProvider) {
-        return poset.unsetOrdering(firstProvider, secondProvider);
-    }
-
-    public Iterator getServiceProviders(boolean useOrdering) {
-        if (useOrdering) {
-            return poset.iterator();
-        } else {
-            return map.values().iterator();
-        }
-    }
-
-    public <T> T getServiceProviderByClass(Class<T> providerClass) {
-        return (T)map.get(providerClass);
-    }
-
-    public void clear() {
-        Iterator iter = map.values().iterator();
-        while (iter.hasNext()) {
-            Object provider = iter.next();
-            iter.remove();
-
-            if (provider instanceof RegisterableService) {
-                RegisterableService rs = (RegisterableService)provider;
-                rs.onDeregistration(registry, category);
-            }
-        }
-        poset.clear();
-    }
-
-    public void finalize() {
-        clear();
-    }
-}
-
-
-/**
- * A class for wrapping <code>Iterators</code> with a filter function.
- * This provides an iterator for a subset without duplication.
- */
-class FilterIterator<T> implements Iterator<T> {
-
-    private Iterator<T> iter;
-    private ServiceRegistry.Filter filter;
-
-    private T next = null;
-
-    public FilterIterator(Iterator<T> iter,
-                          ServiceRegistry.Filter filter) {
-        this.iter = iter;
-        this.filter = filter;
-        advance();
-    }
-
-    private void advance() {
-        while (iter.hasNext()) {
-            T elt = iter.next();
-            if (filter.filter(elt)) {
-                next = elt;
-                return;
-            }
-        }
-
-        next = null;
-    }
-
-    public boolean hasNext() {
-        return next != null;
-    }
-
-    public T next() {
-        if (next == null) {
-            throw new NoSuchElementException();
-        }
-        T o = next;
-        advance();
-        return o;
-    }
-
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-}
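
The block removed above appears to be a bundled copy of the JDK's ImageIO-style service registry SPI (ServiceRegistry plus the package-private SubRegistry and FilterIterator helpers). Its provider lookup was already a thin wrapper around java.util.ServiceLoader, which callers can use directly. A minimal sketch of the equivalent lookup, not taken from the codebase (CharsetProvider is used only because it ships with the JDK, so the example compiles on its own):

    import java.nio.charset.spi.CharsetProvider;
    import java.util.Iterator;
    import java.util.ServiceLoader;

    public class ProviderLookupSketch {

        // Equivalent of the removed ServiceRegistry.lookupProviders(Class, ClassLoader).
        public static <T> Iterator<T> lookupProviders(Class<T> providerClass, ClassLoader loader) {
            if (providerClass == null) {
                throw new IllegalArgumentException("providerClass == null!");
            }
            // Providers are discovered via META-INF/services entries, in arbitrary order.
            return ServiceLoader.load(providerClass, loader).iterator();
        }

        public static void main(String[] args) {
            ClassLoader cl = Thread.currentThread().getContextClassLoader();
            Iterator<CharsetProvider> providers = lookupProviders(CharsetProvider.class, cl);
            while (providers.hasNext()) {
                System.out.println("provider: " + providers.next().getClass().getName());
            }
        }
    }
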
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
index f321f507d30..0822f6eed72 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReader.java
@@ -281,6 +281,20 @@ public String readString(int n) throws IOException {
         }
         return ret;
     }
+    
+    /* 
+     * Same, but expecting potential Unicode characters.
+     */
+    public String readUtfString(int n) throws IOException {
+
+        String ret = new String(readBytes(n), "UTF8");
+
+        // Remove the terminating and/or padding zero bytes:
+        if (ret.indexOf(0) > -1) {
+            return ret.substring(0, ret.indexOf(0));
+        }
+        return ret;
+    }
 
     /* 
      * More complex helper methods for reading NewDTA "sections" ...
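
The new readUtfString(n) reads a fixed-width, zero-padded field and decodes it as UTF-8 before stripping the padding ("UTF8" is a valid Java charset alias for UTF-8). A standalone sketch of the same trimming logic, not the reader class itself:

    import java.nio.charset.StandardCharsets;

    public class UtfFieldSketch {

        // Decode a zero-padded slot and cut at the first NUL byte, as readUtfString() does.
        static String decodeZeroPadded(byte[] field) {
            String s = new String(field, StandardCharsets.UTF_8);
            int nul = s.indexOf(0);
            return (nul > -1) ? s.substring(0, nul) : s;
        }

        public static void main(String[] args) {
            // A 16-byte slot holding "Überschrift" followed by zero padding (illustrative data).
            byte[] slot = new byte[16];
            byte[] text = "Überschrift".getBytes(StandardCharsets.UTF_8);
            System.arraycopy(text, 0, slot, 0, text.length);
            System.out.println(decodeZeroPadded(slot)); // prints: Überschrift
        }
    }
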
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
index 994b4901bee..22581834676 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReader.java
@@ -687,7 +687,7 @@ private void readVariableLabels(DataReader reader) throws IOException {
         reader.readOpeningTag(TAG_VARIABLE_LABELS);
 
         for (int i = 0; i < dataTable.getVarQuantity(); i++) {
-            String variableLabel = reader.readString(DTAVersion == 117? 81: 321);
+            String variableLabel = reader.readUtfString(DTAVersion == 117? 81: 321);
             logger.fine("variable " + i + ": label=" + variableLabel);
             if ((variableLabel != null) && (!variableLabel.equals(""))) {
                 dataTable.getDataVariables().get(i).setLabel(variableLabel);
@@ -1213,7 +1213,7 @@ private void readValueLabels(DataReader reader) throws IOException {
                 }
                 label_length = (int)(label_end - label_offset);
 
-                category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), "US-ASCII");
+                category_value_labels[i] = new String(Arrays.copyOfRange(labelBytes, (int)label_offset, (int)label_end-1), "UTF8");
                 total_label_bytes += label_length;
             }
 
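
Besides switching variable labels to readUtfString(), the hunk above changes value-label decoding from US-ASCII to UTF-8; under US-ASCII any byte above 0x7F decodes to the replacement character, so non-English labels were silently mangled. A small illustration, not project code:

    import java.nio.charset.StandardCharsets;

    public class LabelCharsetSketch {
        public static void main(String[] args) {
            byte[] labelBytes = "ciudad de México".getBytes(StandardCharsets.UTF_8);
            // US-ASCII turns the two UTF-8 bytes of 'é' into replacement characters.
            System.out.println(new String(labelBytes, StandardCharsets.US_ASCII));
            // UTF-8 round-trips the original label.
            System.out.println(new String(labelBytes, StandardCharsets.UTF_8));
        }
    }
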
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
index 6d17a5bd553..eb1353fd792 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/rdata/RDATAFileReader.java
@@ -21,14 +21,12 @@
 
 
 import java.io.*;
-import java.io.FileReader;
 import java.io.InputStreamReader;
 import java.text.*;
 import java.util.logging.*;
 import java.util.*;
-import java.security.NoSuchAlgorithmException;
 
-import javax.inject.Inject;
+import jakarta.inject.Inject;
 
 // Rosuda Wrappers and Methods for R-calls to Rserve
 import edu.harvard.iq.dataverse.settings.JvmSettings;
@@ -43,18 +41,14 @@
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
 
-import edu.harvard.iq.dataverse.ingest.plugin.spi.*;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.spi.TabularDataFileReaderSpi;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
 import edu.harvard.iq.dataverse.rserve.*;
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
 
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.lang3.ArrayUtils;
+
 /**
  * Dataverse 4.0 implementation of <code>TabularDataFileReader</code> for the 
  * RData Binary Format.
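
The import changes in this file and in most of the files below are part of the blanket javax.* to jakarta.* namespace migration (Jakarta EE 9+): the annotations and types keep their names and only the package prefix changes, while JDK-owned packages such as javax.xml.stream (seen further below) intentionally stay under javax. A sketch of the pattern, assuming the jakarta.inject and jakarta.persistence APIs are on the classpath as they are in this project:

    // before (Java EE 8 and earlier):
    //   import javax.inject.Inject;
    //   import javax.persistence.EntityManager;
    //   import javax.persistence.PersistenceContext;

    // after (Jakarta EE 9+):
    import jakarta.inject.Inject;
    import jakarta.persistence.EntityManager;
    import jakarta.persistence.PersistenceContext;

    public class NamespaceMigrationSketch {

        @PersistenceContext
        EntityManager em;            // usage is unchanged apart from the import

        @Inject
        Runnable someCollaborator;   // hypothetical injection point, for illustration only
    }
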
diff --git a/src/main/java/edu/harvard/iq/dataverse/license/License.java b/src/main/java/edu/harvard/iq/dataverse/license/License.java
index c6e2cdbc2e5..fe19073ab8d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/license/License.java
+++ b/src/main/java/edu/harvard/iq/dataverse/license/License.java
@@ -1,15 +1,15 @@
 package edu.harvard.iq.dataverse.license;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
+import jakarta.persistence.UniqueConstraint;
 
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java
index 4f874132128..26b7ca63a29 100644
--- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseConverter.java
@@ -6,11 +6,11 @@
 
 package edu.harvard.iq.dataverse.license;
 
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
 
 @FacesConverter("licenseConverter")
 public class LicenseConverter implements Converter {
diff --git a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java
index b554fecd437..93f4958038c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/license/LicenseServiceBean.java
@@ -3,14 +3,15 @@
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
 import edu.harvard.iq.dataverse.api.AbstractApiBean.WrappedResponse;
+import static edu.harvard.iq.dataverse.dataset.DatasetUtil.getLocalizedLicenseName;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.PersistenceException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.PersistenceException;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -64,6 +65,31 @@ public License getByNameOrUri(String nameOrUri) {
             return null;
         }
     }
+    
+    public License getByPotentiallyLocalizedName(String name) {
+        // First, try the name against the name column in the License table, 
+        // verbatim: 
+        License license = getByNameOrUri(name); 
+        if (license != null) {
+            return license; 
+        }
+        
+        // Then, if still here, go through the list, see if any of the names
+        // match this string as a translated name:
+        List<License> allActiveLicenses = listAllActive();
+        if (allActiveLicenses == null) {
+            return null; 
+        }
+        for (License activeLicense : allActiveLicenses) {
+            // This is DatasetUtil.getLicenseName(), it will return the 
+            // localized/translated name, if available.
+            if (name.equals(getLocalizedLicenseName(activeLicense))) {
+                return activeLicense;
+            }
+        }
+        
+        return null; 
+    }
 
     public int setDefault(Long id) throws WrappedResponse{
         License candidate = getById(id);
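
The new getByPotentiallyLocalizedName() tries the supplied string verbatim against the license name/URI first and only then compares it to the localized display name of each active license. A hypothetical caller, for illustration only (the injected field and the incoming value are assumptions; the lookup order is the one implemented above):

    import jakarta.ejb.EJB;

    // Assumes this sketch lives in the same package as LicenseServiceBean and License.
    public class LicenseLookupSketch {

        @EJB
        LicenseServiceBean licenseSvc;

        License resolve(String userSuppliedName) {
            // "CC0 1.0" would match the stored name directly; a translated display name
            // would only match via the localized-name fallback.
            License match = licenseSvc.getByPotentiallyLocalizedName(userSuppliedName);
            if (match == null) {
                throw new IllegalArgumentException("Unknown license: " + userSuppliedName);
            }
            return match;
        }
    }
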
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java b/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java
index 33486128e7b..5844c8b6ab9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/DvObjectStorageLocation.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.DvObject;
 import java.io.Serializable;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
 
 /**
  * Future use, maybe. Once we're happy with the design we'll enable it as an
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java
index d873b9f8989..c074cb5918f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSite.java
@@ -2,13 +2,13 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
 
 @Entity
 public class StorageSite implements Serializable {
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java
index c7057ab9318..781e896e9a7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteServiceBean.java
@@ -2,12 +2,12 @@
 
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.Stateless;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Stateless;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 @Stateless
 public class StorageSiteServiceBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java
index ebc2bb0f19f..6ff0f7ca379 100644
--- a/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/locality/StorageSiteUtil.java
@@ -2,7 +2,7 @@
 
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.List;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 public class StorageSiteUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java
index 3c1c0bc0c68..469f3abe9da 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitations.java
@@ -7,14 +7,14 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import java.io.Serializable;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.validation.constraints.NotNull;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.validation.constraints.NotNull;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java
index c05fc9b1a4e..50c24274bb2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetExternalCitationsServiceBean.java
@@ -10,16 +10,16 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
index fe0565c3ff8..ac3dff356eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetrics.java
@@ -3,15 +3,15 @@
 import edu.harvard.iq.dataverse.Dataset;
 import java.io.Serializable;
 
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Transient;
-import javax.validation.constraints.NotNull;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Transient;
+import jakarta.validation.constraints.NotNull;
 
 /**
  * Cached versions of views, downloads, and citations to show in the UI and API.
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
index 39afdf318ad..0fb7e9f1e6c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java
@@ -3,28 +3,24 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
-import java.io.StringReader;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
 import java.math.BigDecimal;
-import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.Set;
-import java.util.concurrent.Future;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
 
 /**
  *
@@ -128,9 +124,7 @@ public List<DatasetMetrics> parseSushiReport(JsonObject report, Dataset dataset)
             List<DatasetMetrics> datasetMetricsDataset = new ArrayList<>();
             String globalId = null;
             Dataset ds = null;
-            StringReader rdr = new StringReader(reportDataset.toString());
-            JsonReader jrdr = Json.createReader(rdr);
-            JsonObject obj = jrdr.readObject();
+            JsonObject obj = JsonUtil.getJsonObject(reportDataset.toString());
             String jsonGlobalId = "";
             String globalIdType = "";
             if (obj.containsKey("dataset-id")) {
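
The SUSHI report parsing above now delegates to the project's JsonUtil.getJsonObject(String) helper instead of wiring up a StringReader and JsonReader by hand. The helper's source is not part of this diff; a rough equivalent of the removed lines it replaces looks like this:

    import java.io.StringReader;
    import jakarta.json.Json;
    import jakarta.json.JsonObject;
    import jakarta.json.JsonReader;

    public class JsonParseSketch {

        // Roughly what the removed StringReader/JsonReader lines did (assumption: not
        // the actual JsonUtil implementation, which is not shown in this diff).
        static JsonObject toJsonObject(String json) {
            try (JsonReader reader = Json.createReader(new StringReader(json))) {
                return reader.readObject();
            }
        }

        public static void main(String[] args) {
            // Illustrative payload only, not the real SUSHI report shape.
            String report = "{\"dataset-id\":\"doi:10.5072/FK2/EXAMPLE\"}";
            System.out.println(toJsonObject(report).containsKey("dataset-id")); // true
        }
    }
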
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java
index e1a635a9d6c..5edf2fde0c3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBean.java
@@ -16,14 +16,14 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.TimeZone;
-import javax.ejb.EJB;
-import javax.enterprise.context.RequestScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
+import jakarta.ws.rs.core.HttpHeaders;
+import jakarta.ws.rs.core.UriInfo;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
index f3d45642083..8f32750f090 100644
--- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtil.java
@@ -1,17 +1,16 @@
 package edu.harvard.iq.dataverse.makedatacount;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonValue;
-import javax.persistence.Transient;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.persistence.Transient;
 
 /**
  * See doc/sphinx-guides/source/admin/make-data-count.rst for user facing docs
diff --git a/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java
index 88af8478a8f..33f8277919a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metadataimport/ForeignMetadataImportServiceBean.java
@@ -2,7 +2,6 @@
 package edu.harvard.iq.dataverse.metadataimport;
 
 
-import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
@@ -17,13 +16,13 @@
 import java.io.IOException;
 import java.util.logging.Logger;
 import java.io.StringReader;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.PersistenceContext;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
index 50c8c4098a1..1b5619c53e0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsServiceBean.java
@@ -2,8 +2,8 @@
 
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.GuestbookResponse;
 import edu.harvard.iq.dataverse.Metric;
-import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics;
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil.MetricType;
 
 import static edu.harvard.iq.dataverse.metrics.MetricsUtil.*;
@@ -23,19 +23,19 @@
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.Query;
-import javax.ws.rs.core.UriInfo;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.Query;
+import jakarta.ws.rs.core.UriInfo;
 
 @Stateless
 public class MetricsServiceBean implements Serializable {
@@ -51,7 +51,7 @@ public class MetricsServiceBean implements Serializable {
 
     /** Dataverses */
 
-    
+
     public JsonArray getDataversesTimeSeries(UriInfo uriInfo, Dataverse d) {
         Query query = em.createNativeQuery(""
                 + "select distinct to_char(date_trunc('month', dvobject.publicationdate),'YYYY-MM') as month, count(date_trunc('month', dvobject.publicationdate))\n"
@@ -64,7 +64,7 @@ public JsonArray getDataversesTimeSeries(UriInfo uriInfo, Dataverse d) {
         List<Object[]> results = query.getResultList();
         return MetricsUtil.timeSeriesToJson(results);
     }
-    
+
     /**
      * @param yyyymm Month in YYYY-MM format.
      * @param d
@@ -129,9 +129,9 @@ public List<Object[]> dataversesBySubject(Dataverse d) {
 
     /** Datasets */
 
-    
+
     public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dataverse d) {
-        Query query = em.createNativeQuery(                
+        Query query = em.createNativeQuery(
                 "select distinct date, count(dataset_id)\n"
                 + "from (\n"
                 + "select min(to_char(COALESCE(releasetime, createtime), 'YYYY-MM')) as date, dataset_id\n"
@@ -149,8 +149,8 @@ public JsonArray getDatasetsTimeSeries(UriInfo uriInfo, String dataLocation, Dat
         List<Object[]> results = query.getResultList();
         return MetricsUtil.timeSeriesToJson(results);
     }
-    
-    
+
+
     /**
      * @param yyyymm Month in YYYY-MM format.
      * @param d
@@ -180,10 +180,10 @@ public long datasetsToMonth(String yyyymm, String dataLocation, Dataverse d) {
         // But do not use this notation if you need the values returned to
         // meaningfully identify the datasets!
 
-  
+
         Query query = em.createNativeQuery(
-                        
-                        
+
+
                 "select count(*)\n"
                         + "from (\n"
                         + "select datasetversion.dataset_id || ':' || max(datasetversion.versionnumber + (.1 * datasetversion.minorversionnumber))\n"
@@ -312,7 +312,7 @@ public JsonArray filesTimeSeries(Dataverse d) {
         return MetricsUtil.timeSeriesToJson(results);
     }
 
-    
+
     /**
      * @param yyyymm Month in YYYY-MM format.
      * @param d
@@ -383,13 +383,13 @@ public JsonArray filesByType(Dataverse d) {
                 jab.add(stats);
             }
 
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
         }
         return jab.build();
 
     }
-    
+
     public JsonArray filesByTypeTimeSeries(Dataverse d, boolean published) {
         Query query = em.createNativeQuery("SELECT DISTINCT to_char(" + (published ? "ob.publicationdate" : "ob.createdate") + ",'YYYY-MM') as date, df.contenttype, count(df.id), coalesce(sum(df.filesize),0) "
                 + " FROM DataFile df, DvObject ob"
@@ -402,13 +402,13 @@ public JsonArray filesByTypeTimeSeries(Dataverse d, boolean published) {
         logger.log(Level.FINE, "Metric query: {0}", query);
         List<Object[]> results = query.getResultList();
         return MetricsUtil.timeSeriesByTypeToJson(results);
-        
+
     }
-    /** Downloads 
+    /** Downloads
      * @param d
      * @throws ParseException */
 
-    
+
     public JsonArray downloadsTimeSeries(Dataverse d) {
         // ToDo - published only?
         Query earlyDateQuery = em.createNativeQuery(""
@@ -425,17 +425,18 @@ public JsonArray downloadsTimeSeries(Dataverse d) {
                 + "select  distinct COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') as date, count(id)\n"
                 + "from guestbookresponse\n"
                 + ((d == null) ? "" : "where dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")")
+                + ((d == null) ? "where ":" and ") + "eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
                 + " group by COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "') order by  COALESCE(to_char(responsetime, 'YYYY-MM'),'" + earliest + "');");
 
         logger.log(Level.FINE, "Metric query: {0}", query);
         List<Object[]> results = query.getResultList();
         return MetricsUtil.timeSeriesToJson(results);
     }
-    
+
     /*
      * This includes getting historic download without a timestamp if query
      * is earlier than earliest timestamped record
-     * 
+     *
      * @param yyyymm Month in YYYY-MM format.
      */
     public long downloadsToMonth(String yyyymm, Dataverse d) throws ParseException {
@@ -455,9 +456,10 @@ public long downloadsToMonth(String yyyymm, Dataverse d) throws ParseException {
                 Query query = em.createNativeQuery(""
                         + "select count(id)\n"
                         + "from guestbookresponse\n"
-                        + "where date_trunc('month', responsetime) <=  to_date('" + yyyymm + "','YYYY-MM')"
-                        + "or responsetime is NULL\n" // includes historic guestbook records without date
-                    + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");") 
+                        + "where (date_trunc('month', responsetime) <=  to_date('" + yyyymm + "','YYYY-MM')"
+                        + "or responsetime is NULL)\n" // includes historic guestbook records without date
+                        + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
+                    + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");")
                 );
                 logger.log(Level.FINE, "Metric query: {0}", query);
                 return (long) query.getSingleResult();
@@ -478,18 +480,20 @@ public long downloadsPastDays(int days, Dataverse d) {
                 + "select count(id)\n"
                 + "from guestbookresponse\n"
                 + "where responsetime > current_date - interval '" + days + "' day\n"
+                + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
                 + ((d==null) ? ";": "AND dataset_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ");")
         );
         logger.log(Level.FINE, "Metric query: {0}", query);
 
         return (long) query.getSingleResult();
     }
-    
+
     public JsonArray fileDownloadsTimeSeries(Dataverse d, boolean uniqueCounts) {
         Query query = em.createNativeQuery("select distinct to_char(gb.responsetime, 'YYYY-MM') as date, ob.id, ob.protocol || ':' || ob.authority || '/' || ob.identifier as pid, count(" + (uniqueCounts ? "distinct email" : "*") + ") "
                 + " FROM guestbookresponse gb, DvObject ob"
                 + " where ob.id = gb.datafile_id "
                 + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n")
+                + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
                 + "group by gb.datafile_id, ob.id, ob.protocol, ob.authority, ob.identifier, to_char(gb.responsetime, 'YYYY-MM') order by to_char(gb.responsetime, 'YYYY-MM');");
 
         logger.log(Level.FINE, "Metric query: {0}", query);
@@ -497,13 +501,14 @@ public JsonArray fileDownloadsTimeSeries(Dataverse d, boolean uniqueCounts) {
         return MetricsUtil.timeSeriesByIDAndPIDToJson(results);
 
     }
-    
+
     public JsonArray fileDownloads(String yyyymm, Dataverse d, boolean uniqueCounts) {
         Query query = em.createNativeQuery("select ob.id, ob.protocol || ':' || ob.authority || '/' || ob.identifier as pid, count(" + (uniqueCounts ? "distinct email" : "*") + ") "
                 + " FROM guestbookresponse gb, DvObject ob"
                 + " where ob.id = gb.datafile_id "
                 + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n")
                 + " and date_trunc('month', gb.responsetime) <=  to_date('" + yyyymm + "','YYYY-MM')\n"
+                + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
                 + "group by gb.datafile_id, ob.id, ob.protocol, ob.authority, ob.identifier order by count desc;");
 
         logger.log(Level.FINE, "Metric query: {0}", query);
@@ -519,7 +524,7 @@ public JsonArray fileDownloads(String yyyymm, Dataverse d, boolean uniqueCounts)
                 job.add(MetricsUtil.COUNT, (long) result[2]);
                 jab.add(job);
             }
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
         }
         return jab.build();
@@ -530,6 +535,7 @@ public JsonArray uniqueDownloadsTimeSeries(Dataverse d) {
                 + " FROM guestbookresponse gb, DvObject ob"
                 + " where ob.id = gb.dataset_id "
                 + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n")
+                + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
                 + "group by gb.dataset_id, ob.protocol, ob.authority, ob.identifier, to_char(gb.responsetime, 'YYYY-MM') order by to_char(gb.responsetime, 'YYYY-MM');");
 
         logger.log(Level.FINE, "Metric query: {0}", query);
@@ -537,7 +543,7 @@ public JsonArray uniqueDownloadsTimeSeries(Dataverse d) {
         return MetricsUtil.timeSeriesByPIDToJson(results);
 
     }
-    
+
     public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) {
 
     //select distinct count(distinct email),dataset_id, date_trunc('month', responsetime)  from guestbookresponse group by dataset_id, date_trunc('month',responsetime) order by dataset_id,date_trunc('month',responsetime);
@@ -547,6 +553,7 @@ public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) {
                 + " where ob.id = gb.dataset_id "
                 + ((d == null) ? "" : " and ob.owner_id in (" + getCommaSeparatedIdStringForSubtree(d, "Dataverse") + ")\n")
                 + " and date_trunc('month', responsetime) <=  to_date('" + yyyymm + "','YYYY-MM')\n"
+                + "and eventtype!='" + GuestbookResponse.ACCESS_REQUEST +"'\n"
                 + "group by gb.dataset_id, ob.protocol, ob.authority, ob.identifier order by count(distinct email) desc;");
         JsonArrayBuilder jab = Json.createArrayBuilder();
         try {
@@ -558,16 +565,16 @@ public JsonArray uniqueDatasetDownloads(String yyyymm, Dataverse d) {
                 jab.add(job);
             }
 
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
         }
         return jab.build();
 
     }
-    
-    //MDC 
-    
-    
+
+    //MDC
+
+
     public JsonArray mdcMetricTimeSeries(MetricType metricType, String country, Dataverse d) {
         Query query = em.createNativeQuery("SELECT distinct substring(monthyear from 1 for 7) as date, coalesce(sum(" + metricType.toString() + "),0) as count FROM DatasetMetrics\n"
                 + ((d == null) ? "" : "WHERE dataset_id in ( " + getCommaSeparatedIdStringForSubtree(d, "Dataset") + ")\n")
@@ -718,7 +725,7 @@ public Metric getMetric(String name, String dataLocation, String dayString, Data
         Metric metric = null;
         try {
             metric = (Metric) query.getSingleResult();
-        } catch (javax.persistence.NoResultException nr) {
+        } catch (NoResultException nr) {
             // do nothing
             logger.fine("No result");
         } catch (NonUniqueResultException nur) {
@@ -739,7 +746,7 @@ public Metric getMetric(String name, String dataLocation, String dayString, Data
     // https://github.com/DANS-KNAW/dataverse/blob/dans-develop/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsDansServiceBean.java
 
     /**
-     * 
+     *
      * @param dvId - parent dataverse id
      * @param dtype - type of object to return 'Dataverse' or 'Dataset'
      * @return - list of objects of specified type included in the subtree (includes parent dataverse if dtype is 'Dataverse')
@@ -761,7 +768,7 @@ private String getCommaSeparatedIdStringForSubtree(Dataverse d, String dtype) {
     }
 
     private List<Integer> getChildrenIdsRecursively(Long dvId, String dtype, DatasetVersion.VersionState versionState) {
-        
+
         //Intended to be called only with dvId != null
         String sql = "WITH RECURSIVE querytree AS (\n"
                 + "     SELECT id, dtype, owner_id, publicationdate\n"
diff --git a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
index 72d8f5402bb..74bb53e1191 100644
--- a/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/metrics/MetricsUtil.java
@@ -6,21 +6,19 @@
 import java.time.LocalDate;
 import java.time.YearMonth;
 import java.time.format.DateTimeFormatter;
-import java.time.format.DateTimeFormatterBuilder;
 import java.time.format.DateTimeParseException;
-import java.time.temporal.ChronoField;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.ws.rs.BadRequestException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.ws.rs.BadRequestException;
 
 public class MetricsUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
index 6b31cfbecf8..0a64f42d840 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java
@@ -6,8 +6,10 @@
 import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
+import edu.harvard.iq.dataverse.api.auth.AuthRequired;
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
 import edu.harvard.iq.dataverse.search.SolrQueryResponse;
 import edu.harvard.iq.dataverse.search.SolrSearchResult;
@@ -23,21 +25,22 @@
 import edu.harvard.iq.dataverse.search.SearchException;
 import edu.harvard.iq.dataverse.search.SearchFields;
 import edu.harvard.iq.dataverse.search.SortBy;
-import java.math.BigDecimal;
+
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 import java.util.logging.Logger;
 import java.util.Locale;
-import javax.ejb.EJB;
-import javax.inject.Inject;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
+import jakarta.ejb.EJB;
+import jakarta.inject.Inject;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.QueryParam;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Context;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import org.apache.commons.lang3.StringUtils;
@@ -81,7 +84,6 @@ public class DataRetrieverAPI extends AbstractApiBean {
     
     private List<DataverseRole> roleList;
     private DataverseRolePermissionHelper rolePermissionHelper;
-    private List<String> defaultDvObjectTypes = MyDataFilterParams.defaultDvObjectTypes;
     private MyDataFinder myDataFinder;
     private SolrQueryResponse solrQueryResponse;
     private AuthenticatedUser authUser = null;
@@ -98,11 +100,6 @@ public DataRetrieverAPI(){
            
     }
     
-    private int randInt(int min, int max) {
-        Random rand = new Random();
-        return rand.nextInt((max - min) + 1) + min;
-    }
-    
     public String getRetrieveDataFullAPIPath(){
         return DataRetrieverAPI.retrieveDataFullAPIPath;
     }
@@ -228,7 +225,12 @@ private SolrQueryResponse getTotalCountsFromSolr(DataverseRequest dataverseReque
                     //SearchFields.RELEASE_OR_CREATE_DATE, SortBy.DESCENDING,
                     0, //paginationStart,
                     true, // dataRelatedToMe
-                    SearchConstants.NUM_SOLR_DOCS_TO_RETRIEVE //10 // SearchFields.NUM_SOLR_DOCS_TO_RETRIEVE
+                    SearchConstants.NUM_SOLR_DOCS_TO_RETRIEVE, //10 // SearchFields.NUM_SOLR_DOCS_TO_RETRIEVE
+                    true, 
+                    null,
+                    null,
+                    false, // no need to request facets here ...
+                    false  // ... same for highlights
             );
         } catch (SearchException ex) {
             logger.severe("Search for total counts failed with filter query");
@@ -254,83 +256,56 @@ private String getJSONErrorString(String jsonMsg, String optionalLoggerMsg){
         return jsonData.build().toString();
         
     }
-    
 
-    /**
-     * @todo This should support the "X-Dataverse-key" header like the other
-     * APIs.
-     */
-    @Path(retrieveDataPartialAPIPath)
+
     @GET
+    @AuthRequired
+    @Path(retrieveDataPartialAPIPath)
     @Produces({"application/json"})
-    public String retrieveMyDataAsJsonString(@QueryParam("dvobject_types") List<String> dvobject_types, 
+    public String retrieveMyDataAsJsonString(
+            @Context ContainerRequestContext crc,
+            @QueryParam("dvobject_types") List<DvObject.DType> dvobject_types,
             @QueryParam("published_states") List<String> published_states, 
             @QueryParam("selected_page") Integer selectedPage, 
             @QueryParam("mydata_search_term") String searchTerm,             
             @QueryParam("role_ids") List<Long> roleIds, 
             @QueryParam("userIdentifier") String userIdentifier,
-            @QueryParam("key") String apiToken) { //String myDataParams) {
-        //System.out.println("_YE_OLDE_QUERY_COUNTER_");
-        //msgt("_YE_OLDE_QUERY_COUNTER_");  // for debug purposes
-        boolean DEBUG_MODE = false;
+            @QueryParam("filter_validities") Boolean filterValidities,
+            @QueryParam("dataset_valid") List<Boolean> datasetValidities) {
         boolean OTHER_USER = false;
 
         String localeCode = session.getLocaleCode();
         String noMsgResultsFound = BundleUtil.getStringFromPropertyFile("dataretrieverAPI.noMsgResultsFound",
                 "Bundle", new Locale(localeCode));
-        
-        // For, superusers, the searchUser may differ from the authUser
-        //
-        AuthenticatedUser searchUser = null;  
 
-        if (DEBUG_MODE==true){      // DEBUG: use userIdentifier
-            authUser = getUserFromIdentifier(userIdentifier);
-            if (authUser == null){
-                return this.getJSONErrorString("Requires authentication", "retrieveMyDataAsJsonString. User not found!  Shouldn't be using this anyway");              
+        if ((session.getUser() != null) && (session.getUser().isAuthenticated())) {
+            authUser = (AuthenticatedUser) session.getUser();
+        } else {
+            try {
+                authUser = getRequestAuthenticatedUserOrDie(crc);
+            } catch (WrappedResponse e) {
+                return this.getJSONErrorString("Requires authentication.  Please login.", "retrieveMyDataAsJsonString. User not found!  Shouldn't be using this anyway");
             }
-        } else if ((session.getUser() != null)&&(session.getUser().isAuthenticated())){            
-             authUser = (AuthenticatedUser)session.getUser();
-       
-             // If person is a superuser, see if a userIdentifier has been specified 
-             // and use that instead
-             if ((authUser.isSuperuser())&&(userIdentifier != null)&&(!userIdentifier.isEmpty())){
-                 searchUser = getUserFromIdentifier(userIdentifier);
-                 if (searchUser != null){
-                     authUser = searchUser;
-                     OTHER_USER = true;
-                 }else{
-                    return this.getJSONErrorString("No user found for: \"" + userIdentifier + "\"", null);              
-                 }
-             }       
-        } else if (apiToken != null) {      // Is this being accessed by an API Token?
-            
-            authUser = findUserByApiToken(apiToken);
-            if (authUser == null){
-                return this.getJSONErrorString("Requires authentication.  Please login.", "retrieveMyDataAsJsonString. User not found!  Shouldn't be using this anyway");              
-            }else{
-                // If person is a superuser, see if a userIdentifier has been specified 
-                // and use that instead
-                if ((authUser.isSuperuser())&&(userIdentifier != null)&&(!userIdentifier.isEmpty())){
-                    searchUser = getUserFromIdentifier(userIdentifier);
-                    if (searchUser != null){
-                        authUser = searchUser;
-                        OTHER_USER = true;
-                    }else{
-                        return this.getJSONErrorString("No user found for: \"" + userIdentifier + "\"", null);              
-                    }
-                }       
+        }
 
+        // For superusers, the searchUser may differ from the authUser
+        AuthenticatedUser searchUser = null;
+        // If the user is a superuser, see if a userIdentifier has been specified and use that instead
+        if ((authUser.isSuperuser()) && (userIdentifier != null) && (!userIdentifier.isEmpty())) {
+            searchUser = getUserFromIdentifier(userIdentifier);
+            if (searchUser != null) {
+                authUser = searchUser;
+                OTHER_USER = true;
+            } else {
+                return this.getJSONErrorString("No user found for: \"" + userIdentifier + "\"", null);
             }
-                    
-        } else{
-            return this.getJSONErrorString("Requires authentication.  Please login.", "retrieveMyDataAsJsonString. User not found!  Shouldn't be using this anyway");              
         }
-                     
+
         roleList = dataverseRoleService.findAll();
         rolePermissionHelper = new DataverseRolePermissionHelper(roleList);    
         
        
-        List<String> dtypes;
+        List<DvObject.DType> dtypes;
         if (dvobject_types != null){
             dtypes = dvobject_types;
         }else{
@@ -340,6 +315,10 @@ public String retrieveMyDataAsJsonString(@QueryParam("dvobject_types") List<Stri
         if (published_states != null){
             pub_states = published_states;
         }
+        List<Boolean> validities = null;
+        if (filterValidities != null && filterValidities){
+            validities = datasetValidities;
+        }
         
         // ---------------------------------
         // (1) Initialize filterParams and check for Errors 
@@ -347,7 +326,7 @@ public String retrieveMyDataAsJsonString(@QueryParam("dvobject_types") List<Stri
         DataverseRequest dataverseRequest = createDataverseRequest(authUser);
 
         
-        MyDataFilterParams filterParams = new MyDataFilterParams(dataverseRequest, dtypes, pub_states, roleIds, searchTerm);
+        MyDataFilterParams filterParams = new MyDataFilterParams(dataverseRequest, dtypes, pub_states, roleIds, searchTerm, validities);
         if (filterParams.hasError()){
             return this.getJSONErrorString(filterParams.getErrorMessage(), filterParams.getErrorMessage());
         }
@@ -462,7 +441,7 @@ public String retrieveMyDataAsJsonString(@QueryParam("dvobject_types") List<Stri
         //jsonData.add("total_dvobject_counts", getTotalCountsFromSolrAsJSON(searchUser, this.myDataFinder));
 
         
-        if (OTHER_USER==true){
+        if (OTHER_USER){
             jsonData.add("other_user", searchUser.getIdentifier());
         }
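With @AuthRequired in place, the endpoint authenticates through the request context rather than the removed key query parameter, and the filter_validities/dataset_valid parameters expose the new validity filter. A minimal client sketch, assuming the endpoint path is /api/v1/mydata/retrieve and that the usual X-Dataverse-key header carries the API token (both assumptions; the base URL and token below are placeholders):

    import jakarta.ws.rs.client.Client;
    import jakarta.ws.rs.client.ClientBuilder;

    public class MyDataRetrieveExample {
        public static void main(String[] args) {
            Client client = ClientBuilder.newClient();
            String json = client.target("https://demo.example.edu/api/v1/mydata/retrieve")
                    .queryParam("dvobject_types", "Dataset")
                    .queryParam("filter_validities", "true")
                    .queryParam("dataset_valid", "true")   // pass "false" (or both) to include incomplete datasets
                    .request("application/json")
                    .header("X-Dataverse-key", "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx")
                    .get(String.class);
            System.out.println(json);
            client.close();
        }
    }
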
                                 
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java
index 0e99220005c..2ab248fcc0b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFilterParams.java
@@ -17,9 +17,9 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -33,8 +33,8 @@ public class MyDataFilterParams {
     // -----------------------------------
     // Static Reference objects
     // -----------------------------------
-    public static final List<String> defaultDvObjectTypes = Arrays.asList(DvObject.DATAVERSE_DTYPE_STRING, DvObject.DATASET_DTYPE_STRING);
-    public static final List<String> allDvObjectTypes = Arrays.asList(DvObject.DATAVERSE_DTYPE_STRING, DvObject.DATASET_DTYPE_STRING, DvObject.DATAFILE_DTYPE_STRING);
+    public static final List<DvObject.DType> defaultDvObjectTypes = Arrays.asList(DvObject.DType.Dataverse, DvObject.DType.Dataset);
+    public static final List<DvObject.DType> allDvObjectTypes = Arrays.asList(DvObject.DType.Dataverse, DvObject.DType.Dataset, DvObject.DType.DataFile);
     
     public static final List<String> defaultPublishedStates = Arrays.asList(IndexServiceBean.getPUBLISHED_STRING(),
                                                     IndexServiceBean.getUNPUBLISHED_STRING(),
@@ -48,22 +48,23 @@ public class MyDataFilterParams {
                                                     IndexServiceBean.getIN_REVIEW_STRING(),
                                                     IndexServiceBean.getDEACCESSIONED_STRING());*/
             
-    public static final HashMap<String, String> sqlToSolrSearchMap ;
+    public static final HashMap<DvObject.DType, String> sqlToSolrSearchMap ;
     static
     {
         sqlToSolrSearchMap = new HashMap<>();
-        sqlToSolrSearchMap.put(DvObject.DATAVERSE_DTYPE_STRING, SearchConstants.DATAVERSES);
-        sqlToSolrSearchMap.put(DvObject.DATASET_DTYPE_STRING, SearchConstants.DATASETS);
-        sqlToSolrSearchMap.put(DvObject.DATAFILE_DTYPE_STRING, SearchConstants.FILES);
+        sqlToSolrSearchMap.put(DvObject.DType.Dataverse, SearchConstants.DATAVERSES);
+        sqlToSolrSearchMap.put(DvObject.DType.Dataset, SearchConstants.DATASETS);
+        sqlToSolrSearchMap.put(DvObject.DType.DataFile, SearchConstants.FILES);
     }
     
-    public static final HashMap<String, String> userInterfaceToSqlSearchMap ;
+    public static final HashMap<DvObject.DType, String> userInterfaceToSqlSearchMap ;
     static
     {
         userInterfaceToSqlSearchMap = new HashMap<>();
-        userInterfaceToSqlSearchMap.put(DvObject.DATAVERSE_DTYPE_STRING, SearchConstants.UI_DATAVERSES);
-        userInterfaceToSqlSearchMap.put(DvObject.DATASET_DTYPE_STRING, SearchConstants.UI_DATAVERSES);
-        userInterfaceToSqlSearchMap.put(DvObject.DATAFILE_DTYPE_STRING, SearchConstants.UI_FILES);
+        
+        userInterfaceToSqlSearchMap.put(DvObject.DType.Dataverse, SearchConstants.UI_DATAVERSES);
+        userInterfaceToSqlSearchMap.put(DvObject.DType.Dataset, SearchConstants.UI_DATAVERSES);
+        userInterfaceToSqlSearchMap.put(DvObject.DType.DataFile, SearchConstants.UI_FILES);
     }
     
     
@@ -73,9 +74,10 @@ public class MyDataFilterParams {
     private DataverseRequest dataverseRequest;
     private AuthenticatedUser authenticatedUser;
     private String userIdentifier;
-    private List<String> dvObjectTypes;    
+    private List<DvObject.DType> dvObjectTypes;
     private List<String> publicationStatuses;
     private List<Long> roleIds;
+    private List<Boolean> datasetValidities;
     
     //private ArrayList<DataverseRole> roles;
     public static final String defaultSearchTerm = "*:*";
@@ -109,6 +111,7 @@ public MyDataFilterParams(DataverseRequest dataverseRequest, DataverseRolePermis
         }
         this.dvObjectTypes = MyDataFilterParams.allDvObjectTypes;
         this.publicationStatuses = MyDataFilterParams.allPublishedStates;
+        this.datasetValidities = null;
         this.searchTerm = MyDataFilterParams.defaultSearchTerm;
         this.roleIds = roleHelper.getRoleIdList();
     }
@@ -118,8 +121,9 @@ public MyDataFilterParams(DataverseRequest dataverseRequest, DataverseRolePermis
      * @param dvObjectTypes
      * @param publicationStatuses 
      * @param searchTerm 
-     */    
-    public MyDataFilterParams(DataverseRequest dataverseRequest, List<String> dvObjectTypes, List<String> publicationStatuses, List<Long> roleIds, String searchTerm){
+     * @param datasetValidities
+     */
+    public MyDataFilterParams(DataverseRequest dataverseRequest, List<DvObject.DType> dvObjectTypes, List<String> publicationStatuses, List<Long> roleIds, String searchTerm, List<Boolean> datasetValidities) {
         if (dataverseRequest==null){
             throw new NullPointerException("MyDataFilterParams constructor: dataverseRequest cannot be null ");
         }
@@ -138,6 +142,8 @@ public MyDataFilterParams(DataverseRequest dataverseRequest, List<String> dvObje
         }else{
             this.publicationStatuses = publicationStatuses;
         }
+
+        this.datasetValidities = datasetValidities;
         
         // Do something here if none chosen!
         this.roleIds = roleIds;
@@ -194,16 +200,9 @@ private void checkParams(){
             this.addError("No results. Please select one of " + StringUtils.join(MyDataFilterParams.defaultPublishedStates, ", ") + ".");
             return;
         }
-
-        for (String dtype : this.dvObjectTypes){
-            if (!DvObject.DTYPE_LIST.contains(dtype)){
-                this.addError("Sorry!  The type '" + dtype + "' is not known.");
-                return;
-            }               
-        }        
     }
     
-    public List<String> getDvObjectTypes(){
+    public List<DvObject.DType> getDvObjectTypes(){
         return this.dvObjectTypes;
     }
     
@@ -235,19 +234,19 @@ public void addError(String s){
     // start: Convenience methods for dvObjectTypes
     // --------------------------------------------
     public boolean areDataversesIncluded(){
-        if (this.dvObjectTypes.contains(DvObject.DATAVERSE_DTYPE_STRING)){
+        if (this.dvObjectTypes.contains(DvObject.DType.Dataverse)){
             return true;
         }
         return false;
     }
     public boolean areDatasetsIncluded(){
-        if (this.dvObjectTypes.contains(DvObject.DATASET_DTYPE_STRING)){
+        if (this.dvObjectTypes.contains(DvObject.DType.Dataset)){
             return true;
         }
         return false;
     }
     public boolean areFilesIncluded(){
-        if (this.dvObjectTypes.contains(DvObject.DATAFILE_DTYPE_STRING)){
+        if (this.dvObjectTypes.contains(DvObject.DType.DataFile)){
             return true;
         }
         return false;
@@ -259,7 +258,7 @@ public String getSolrFragmentForDvObjectType(){
         }
         
         List<String> solrTypes = new ArrayList<>();
-        for (String dtype : this.dvObjectTypes){
+        for (DvObject.DType dtype : this.dvObjectTypes){
             solrTypes.add(MyDataFilterParams.sqlToSolrSearchMap.get(dtype));
         }
                 
@@ -292,6 +291,20 @@ public String getSolrFragmentForPublicationStatus(){
         return  "(" + SearchFields.PUBLICATION_STATUS + ":" + valStr + ")";
     }
 
+    public String getSolrFragmentForDatasetValidity(){
+        if ((this.datasetValidities == null) || (this.datasetValidities.isEmpty())){
+            return "";
+        }
+
+        String valStr = StringUtils.join(datasetValidities, " OR ");
+        if (this.datasetValidities.size() > 1){
+            valStr = "(" + valStr + ")";
+        }
+
+        return  "(" + SearchFields.DATASET_VALID + ":" + valStr + ")";
+    }
+
     public String getDvObjectTypesAsJSONString(){
         
         return this.getDvObjectTypesAsJSON().build().toString();
@@ -312,19 +325,38 @@ public JsonArrayBuilder getListofSelectedPublicationStatuses(){
         return jsonArray;
                 
     }
+
+        
+    /**
+     * "dataset_valid" : [ true, false ]
+     *
+     * @return
+     */
+    public JsonArrayBuilder getListofSelectedValidities(){
+        if (this.datasetValidities == null || this.datasetValidities.isEmpty()) {
+            return null;
+        }
+
+        JsonArrayBuilder jsonArray = Json.createArrayBuilder();
+
+        for (Boolean valid : this.datasetValidities){
+            jsonArray.add(valid);
+        }
+        return jsonArray;
+    }
     
     
     public JsonObjectBuilder getDvObjectTypesAsJSON(){
         
         JsonArrayBuilder jsonArray = Json.createArrayBuilder();
 
-        jsonArray.add(Json.createObjectBuilder().add("value", DvObject.DATAVERSE_DTYPE_STRING)
+        jsonArray.add(Json.createObjectBuilder().add("value", DvObject.DType.Dataverse.getDType())
                             .add("label", SearchConstants.UI_DATAVERSES)
                             .add("selected", this.areDataversesIncluded()))
-                .add(Json.createObjectBuilder().add("value", DvObject.DATASET_DTYPE_STRING)
+                .add(Json.createObjectBuilder().add("value", DvObject.DType.Dataset.getDType())
                             .add("label", SearchConstants.UI_DATASETS)
                             .add("selected", this.areDatasetsIncluded()))
-                .add(Json.createObjectBuilder().add("value", DvObject.DATAFILE_DTYPE_STRING)
+                .add(Json.createObjectBuilder().add("value", DvObject.DType.DataFile.getDType())
                             .add("label", SearchConstants.UI_FILES)
                             .add("selected", this.areFilesIncluded())
                 );
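getSolrFragmentForDatasetValidity mirrors the publication-status fragment: selected values are OR-joined and wrapped in parentheses only when more than one is present. A standalone sketch of the string it builds, assuming SearchFields.DATASET_VALID is the Solr field name "datasetValid" (an assumption):

    import java.util.List;
    import org.apache.commons.lang3.StringUtils;

    public class ValidityFragmentSketch {
        public static void main(String[] args) {
            System.out.println(fragment(List.of(true)));        // (datasetValid:true)
            System.out.println(fragment(List.of(true, false))); // (datasetValid:(true OR false))
        }

        // Hedged re-implementation of the fragment logic; "datasetValid" is an assumed field name.
        static String fragment(List<Boolean> validities) {
            if (validities == null || validities.isEmpty()) {
                return "";
            }
            String valStr = StringUtils.join(validities, " OR ");
            if (validities.size() > 1) {
                valStr = "(" + valStr + ")";
            }
            return "(datasetValid:" + valStr + ")";
        }
    }
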
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java
index 7aa0cab13d2..917884f3549 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java
@@ -18,9 +18,9 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.commons.lang3.StringUtils;
 
 /**
@@ -238,6 +238,12 @@ private List<String> getSolrFilterQueries(boolean totalCountsOnly){
         filterQueries.add(this.filterParams.getSolrFragmentForPublicationStatus());
         //fq=publicationStatus:"Unpublished"&fq=publicationStatus:"Draft"
 
+        // -----------------------------------------------------------------
+        // (4) FQ by dataset metadata validity
+        // -----------------------------------------------------------------
+        filterQueries.add(this.filterParams.getSolrFragmentForDatasetValidity());
+        //fq=datasetValid:(true OR false)
+
         return filterQueries;
     }
 
@@ -392,12 +398,17 @@ public String formatUserIdentifierAsAssigneeIdentifier(String userIdentifier){
      *
      * @return
      */
-    public JsonObjectBuilder getSelectedFilterParamsAsJSON(){
+    public JsonObjectBuilder getSelectedFilterParamsAsJSON() {
 
         JsonObjectBuilder jsonData = Json.createObjectBuilder();
         jsonData.add("publication_statuses", this.filterParams.getListofSelectedPublicationStatuses())
                 .add("role_names", this.getListofSelectedRoles());
 
+        JsonArrayBuilder selVal = this.filterParams.getListofSelectedValidities();
+        if (selVal != null) {
+            jsonData.add("dataset_valid", selVal);
+        }
+
         return jsonData;
     }
 
@@ -516,8 +527,8 @@ private boolean runStep2DirectAssignments(){
 
             this.childToParentIds.put(dvId, parentId);
 
-            switch(dtype){
-                case(DvObject.DATAVERSE_DTYPE_STRING):
+            switch(DvObject.DType.valueOf(dtype)){
+                case Dataverse:
                     //if (this.idsWithDataversePermissions.containsKey(dvId)){
                     this.directDataverseIds.add(dvId);  // Direct dataverse (no indirect dataverses)
                     //}
@@ -532,7 +543,7 @@ private boolean runStep2DirectAssignments(){
                         this.datasetParentIds.add(dvId);    // Parent to dataset
                     }
                     break;
-                case(DvObject.DATASET_DTYPE_STRING):
+                case Dataset:
                     //if (this.idsWithDatasetPermissions.containsKey(dvId)){
                     this.directDatasetIds.add(dvId); // Direct dataset
                     //}
@@ -540,7 +551,7 @@ private boolean runStep2DirectAssignments(){
                         this.fileParentIds.add(dvId);   // Parent to file
                     }
                     break;
-                case(DvObject.DATAFILE_DTYPE_STRING):
+                case DataFile:
                     if (this.idsWithFilePermissions.containsKey(dvId)){
                         this.directFileIds.add(dvId); // Direct file
                     }
@@ -585,7 +596,7 @@ private boolean runStep3FilePermsAssignedAtDataverse(){
             this.childToParentIds.put(dvId, parentId);
 
             // Should ALWAYS be a Dataset!
-            if (dtype.equals(DvObject.DATASET_DTYPE_STRING)){
+            if (DvObject.DType.valueOf(dtype).equals(DvObject.DType.Dataset)){
                 this.fileParentIds.add(dvId);
             }
         }
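Because the switches above now go through DvObject.DType.valueOf(dtype), the dtype strings stored in the database must match the enum constant names exactly; an unexpected value fails fast with IllegalArgumentException instead of silently falling through as an unmatched string case. A small self-contained illustration (the constant names mirror those used in the hunks above):

    public class DTypeLookupSketch {
        // Hedged stand-in mirroring the DvObject.DType constants referenced above.
        enum DType { Dataverse, Dataset, DataFile }

        public static void main(String[] args) {
            // Succeeds: the stored dtype matches a constant name exactly.
            System.out.println(DType.valueOf("Dataset"));
            // DType.valueOf("dataset") would throw IllegalArgumentException:
            // valueOf() is case-sensitive and accepts only the exact constant names.
        }
    }
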
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java
index 03acc0ccddc..3ae64d9d760 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataPage.java
@@ -1,32 +1,29 @@
 package edu.harvard.iq.dataverse.mydata;
 
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
 import edu.harvard.iq.dataverse.DatasetPage;
 import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
 import edu.harvard.iq.dataverse.DataverseSession;
-import edu.harvard.iq.dataverse.DvObject;
-import static edu.harvard.iq.dataverse.DvObject.DATAFILE_DTYPE_STRING;
-import static edu.harvard.iq.dataverse.DvObject.DATASET_DTYPE_STRING;
-import static edu.harvard.iq.dataverse.DvObject.DATAVERSE_DTYPE_STRING;
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
 import edu.harvard.iq.dataverse.PermissionsWrapper;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
+import edu.harvard.iq.dataverse.SettingsWrapper;
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
-import edu.harvard.iq.dataverse.search.SolrQueryResponse;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 /*
  * To change this license header, choose License Headers in Project Properties.
@@ -45,6 +42,7 @@ public class MyDataPage implements java.io.Serializable {
     private static final Logger logger = Logger.getLogger(DatasetPage.class.getCanonicalName());
 
     @Inject DataverseSession session;    
+    @Inject SettingsWrapper settingsWrapper;
 
     @EJB
     DataverseRoleServiceBean dataverseRoleService;
@@ -113,6 +111,17 @@ public List<String[]> getRoleInfoForCheckboxes(){
             return new ArrayList<>();
         }
     }
+    
+    public List<String[]> getValidityInfoForCheckboxes(){
+        return Arrays.asList(
+            new String[] {"true", "valid", BundleUtil.getStringFromBundle("valid")},
+            new String[] {"false", "incomplete", BundleUtil.getStringFromBundle("incomplete")}
+        );
+    }
+
+    public boolean showValidityFilter() {
+        return JvmSettings.UI_SHOW_VALIDITY_FILTER.lookupOptional(Boolean.class).orElse(false);
+    }
            
     public String getRetrieveDataFullAPIPath(){
         return DataRetrieverAPI.retrieveDataFullAPIPath;
@@ -167,7 +176,7 @@ public String init() {
         HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
 
         DataverseRequest dataverseRequest = new DataverseRequest(authUser, httpServletRequest);
-        this.filterParams = new MyDataFilterParams(dataverseRequest,  MyDataFilterParams.defaultDvObjectTypes, null, null, null);
+        this.filterParams = new MyDataFilterParams(dataverseRequest,  MyDataFilterParams.defaultDvObjectTypes, null, null, null, null);
         
         
         // Temp DataverseRolePermissionHelper -- not in its normal role but for creating initial checkboxes
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java b/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
index 4bf13e04284..096974b9d72 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/Pager.java
@@ -15,9 +15,9 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
index 06841c470d8..dcb76f42acb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RolePermissionHelperPage.java
@@ -8,10 +8,10 @@
 import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /*
  * To change this license header, choose License Headers in Project Properties.
@@ -46,13 +46,6 @@ public String init() {
         List<DataverseRole> roleList = dataverseRoleService.findAll();
         rolePermissionHelper = new DataverseRolePermissionHelper(roleList);
 
-        
-        List<String> dtypes = MyDataFilterParams.defaultDvObjectTypes;
-        //List<String> dtypes = Arrays.asList(DvObject.DATAFILE_DTYPE_STRING, DvObject.DATASET_DTYPE_STRING);
-        //DvObject.DATAFILE_DTYPE_STRING, DvObject.DATASET_DTYPE_STRING, DvObject.DATAVERSE_DTYPE_STRING
-        
-        //List<String> dtypes = new ArrayList<>();
-        
         return null;
     }
     
diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java b/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
index 4556c92ff19..e328a50e962 100644
--- a/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
+++ b/src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
@@ -23,9 +23,8 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import org.apache.commons.lang3.StringUtils;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
 
 /**
  * Input:  dvObject id, parent Id, and dvObject type (from Solr)
@@ -346,7 +345,7 @@ private void findDataverseIdsForFiles(){
                        
             //msg("result: dvId: " + dvId + " |dtype: " + dtype + " |parentId: " + parentId);
             // Should ALWAYS be a Dataset!
-            if (dtype.equals(DvObject.DATASET_DTYPE_STRING)){  
+            if (DvObject.DType.valueOf(dtype).equals(DvObject.DType.Dataset)) {
                 this.childToParentIdHash.put(dvId, parentId); // Store the parent child relation
                 this.addIdNeedingRoleRetrieval(parentId); // We need the roles for this dataverse
                 this.idToDvObjectType.put(parentId, SearchConstants.SOLR_DATAVERSES); // store the dv object type
diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java
index a3150161c52..c078860ad8e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetData.java
@@ -6,19 +6,19 @@
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.UUID;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
-import javax.persistence.Table;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
 
 @Table(indexes = {@Index(columnList="token")
 		, @Index(columnList="builtinuser_id")})
diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java
index b9eabf45159..b19721d56bb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetPage.java
@@ -16,19 +16,19 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.validator.constraints.NotBlank;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java
index c8db23985d8..5d1c167d2a5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBean.java
@@ -10,18 +10,17 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 
 import java.text.MessageFormat;
-import java.util.Date;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java
index 58b765b31ab..3bd9d9dd022 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/FakePidProviderServiceBean.java
@@ -1,68 +1,36 @@
 package edu.harvard.iq.dataverse.pidproviders;
 
-import edu.harvard.iq.dataverse.AbstractGlobalIdServiceBean;
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DvObject;
 import edu.harvard.iq.dataverse.GlobalId;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateNewDatasetCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.ImportDatasetCommand;
-import edu.harvard.iq.dataverse.util.FileUtil;
 
-import java.lang.StackWalker.StackFrame;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import java.util.logging.Logger;
-import java.util.stream.Stream;
 
-import javax.ejb.Stateless;
+import jakarta.ejb.Stateless;
 
 @Stateless
-public class FakePidProviderServiceBean extends AbstractGlobalIdServiceBean {
+public class FakePidProviderServiceBean extends DOIServiceBean {
 
     private static final Logger logger = Logger.getLogger(FakePidProviderServiceBean.class.getCanonicalName());
 
-    @Override
-    public boolean alreadyExists(DvObject dvo) throws Exception {
-        /*
-         * This method is called in cases where the 'right' answer can be true or false:
-         * 
-         * When called via CreateNewDatasetCommand (direct upload case), we expect
-         * 'false' as the response, whereas when called from ImportDatasetCommand or
-         * DeleteDataFileCommand, we expect 'true' as a confirmation that the expected
-         * PID exists.
-         * 
-         * This method now checks the stack and can send true/false as expected by the
-         * calling command as the right default/normal case.
-         *
-         * Alternately, this method could check the database as is done in
-         * DatasetServiceBean.isIdentifierLocallyUnique() (needs a similar method for
-         * DataFiles and could be refactored to only have one query for both).
-         */
-        StackWalker walker = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE);
-        if (walker.walk(this::getCallingClass)) {
-            logger.fine("Called from CreateNewDatasetCommand");
-            return false;
+    
+    //Only need to check locally
+    public boolean isGlobalIdUnique(GlobalId globalId) {
+        try {
+            return ! alreadyRegistered(globalId, false);
+        } catch (Exception e){
+            //we can live with failure - means identifier not found remotely
         }
         return true;
     }
-
-    private boolean getCallingClass(Stream<StackFrame> stackFrameStream) {
-        /*
-         * If/when other cases require a true response from the alreadyExists method,
-         * add those class names to the test below.
-         */
-        return stackFrameStream
-                .filter(frame -> frame.getDeclaringClass().getSimpleName()
-                        .equals(CreateNewDatasetCommand.class.getSimpleName()))
-                .findFirst().map(f -> true).orElse(false);
-    }
     
     @Override
-    public boolean alreadyExists(GlobalId globalId) throws Exception {
-        //Could use the same method as above to return false if/when needed.
-        return true;
+    public boolean alreadyRegistered(GlobalId globalId, boolean noProviderDefault) {
+        boolean existsLocally = !dvObjectService.isGlobalIdLocallyUnique(globalId);
+        return existsLocally ? existsLocally : noProviderDefault;
     }
 
     @Override
@@ -72,12 +40,7 @@ public boolean registerWhenPublished() {
 
     @Override
     public List<String> getProviderInformation() {
-        ArrayList<String> providerInfo = new ArrayList<>();
-        String providerName = "FAKE";
-        String providerLink = "http://dataverse.org";
-        providerInfo.add(providerName);
-        providerInfo.add(providerLink);
-        return providerInfo;
+        return List.of("FAKE", "https://dataverse.org");
     }
 
     @Override
@@ -101,15 +64,14 @@ public void deleteIdentifier(DvObject dvo) throws Exception {
         // no-op
     }
 
-    @Override
-    public Map<String, String> lookupMetadataFromIdentifier(String protocol, String authority, String identifier) {
-        Map<String, String> map = new HashMap<>();
-        return map;
-    }
-
     @Override
     public boolean publicizeIdentifier(DvObject studyIn) {
         return true;
     }
+    
+    @Override
+    protected String getProviderKeyName() {
+        return "FAKE";
+    }
 
 }
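The fake provider now answers alreadyRegistered purely from the local database: a PID already in use locally reports true, otherwise the caller-supplied default is returned. The ternary is equivalent to a logical OR:

    // Decision table for alreadyRegistered(globalId, noProviderDefault), where
    // existsLocally = !dvObjectService.isGlobalIdLocallyUnique(globalId):
    //
    //   existsLocally | noProviderDefault | result
    //   --------------+-------------------+-------
    //   true          |       any         | true
    //   false         |       true        | true
    //   false         |       false       | false
    //
    // i.e. existsLocally || noProviderDefault
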
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java
new file mode 100644
index 00000000000..d145a7ec106
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PermaLinkPidProviderServiceBean.java
@@ -0,0 +1,160 @@
+package edu.harvard.iq.dataverse.pidproviders;
+
+import edu.harvard.iq.dataverse.AbstractGlobalIdServiceBean;
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.Stateless;
+
+/**
+ * PermaLink provider
+ * This is a minimalist permanent ID provider intended for use with 'real' datasets/files where the use case nonetheless doesn't lend itself to the use of DOIs or Handles, e.g.
+ * * due to cost
+ * * for a catalog/archive where Dataverse holds a dataset representing a dataset whose DOI/handle is stored elsewhere
+ * 
+ * The initial implementation mints identifiers locally and provides the existing page URLs (using the ?persistentId=<id> format).
+ * This can be overridden by a configurable parameter to support use of an external resolver.
+ * 
+ */
+@Stateless
+public class PermaLinkPidProviderServiceBean extends AbstractGlobalIdServiceBean {
+
+    private static final Logger logger = Logger.getLogger(PermaLinkPidProviderServiceBean.class.getCanonicalName());
+
+    public static final String PERMA_PROTOCOL = "perma";
+    public static final String PERMA_PROVIDER_NAME = "PERMA";
+
+    //ToDo - handle dataset/file defaults for local system
+    public static final String PERMA_RESOLVER_URL = JvmSettings.PERMALINK_BASEURL
+        .lookupOptional()
+        .orElse(SystemConfig.getDataverseSiteUrlStatic());
+    
+    String authority = null; 
+    private String separator = "";
+    
+    @PostConstruct
+    private void init() {
+        if (PERMA_PROTOCOL.equals(settingsService.getValueForKey(Key.Protocol))) {
+            authority = settingsService.getValueForKey(Key.Authority);
+            configured = true;
+        }
+        
+    }
+    
+    
+    //Only used in PidUtilTest - haven't figured out how to mock a PostConstruct call directly
+    // ToDo - remove after work to allow more than one Pid Provider which is expected to not use stateless beans
+    public void reInit() {
+        init();
+    }
+    
+    @Override
+    public String getSeparator() {
+        //The perma default
+        return separator;
+    }
+    
+    @Override
+    public boolean alreadyRegistered(GlobalId globalId, boolean noProviderDefault) {
+        // Perma doesn't manage registration, so we assume all local PIDs can be treated
+        // as registered
+        boolean existsLocally = !dvObjectService.isGlobalIdLocallyUnique(globalId);
+        return existsLocally ? existsLocally : noProviderDefault;
+    }
+    
+    @Override
+    public boolean registerWhenPublished() {
+        return false;
+    }
+
+    @Override
+    public List<String> getProviderInformation() {
+        return List.of(PERMA_PROVIDER_NAME, PERMA_RESOLVER_URL);
+    }
+
+    @Override
+    public String createIdentifier(DvObject dvo) throws Throwable {
+        //Call external resolver and send landing URL?
+        //FWIW: Return value appears to only be used in RegisterDvObjectCommand where success requires finding the dvo identifier in this string. (Also logged a couple places).
+        return(dvo.getGlobalId().asString());
+    }
+
+    @Override
+    public Map<String, String> getIdentifierMetadata(DvObject dvo) {
+        Map<String, String> map = new HashMap<>();
+        return map;
+    }
+
+    @Override
+    public String modifyIdentifierTargetURL(DvObject dvo) throws Exception {
+        return getTargetUrl(dvo);
+    }
+
+    @Override
+    public void deleteIdentifier(DvObject dvo) throws Exception {
+        // no-op
+    }
+
+    @Override
+    public boolean publicizeIdentifier(DvObject dvObject) {
+        //Generate if needed (i.e. datafile case where we don't create/register early (even with registerWhenPublished == false))
+        if(dvObject.getIdentifier() == null || dvObject.getIdentifier().isEmpty() ){
+            dvObject = generateIdentifier(dvObject);
+        }
+        //Call external resolver and send landing URL?
+        return true;
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String pidString) {
+        //ToDo - handle local PID resolver for dataset/file
+        if (pidString.startsWith(getUrlPrefix())) {
+            pidString = pidString.replace(getUrlPrefix(),
+                    (PERMA_PROTOCOL + ":"));
+        }
+        return super.parsePersistentId(pidString);
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String protocol, String identifierString) {
+        logger.fine("Checking Perma: " + identifierString);
+        if (!PERMA_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        String identifier = null;
+        if (authority != null) {
+            if (identifierString.startsWith(authority)) {
+                identifier = identifierString.substring(authority.length());
+            }
+        }
+        identifier = GlobalIdServiceBean.formatIdentifierString(identifier);
+        if (GlobalIdServiceBean.testforNullTerminator(identifier)) {
+            return null;
+        }
+        return new GlobalId(PERMA_PROTOCOL, authority, identifier, separator, getUrlPrefix(), PERMA_PROVIDER_NAME);
+    }
+    
+    @Override
+    public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
+        if (!PERMA_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        return super.parsePersistentId(protocol, authority, identifier);
+    }
+
+    @Override
+    public String getUrlPrefix() {
+        
+        return PERMA_RESOLVER_URL + "/citation?persistentId=" + PERMA_PROTOCOL + ":";
+    }
+}
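Put together, a PermaLink PID is simply the configured authority plus a locally minted identifier with no separator, resolving through the ordinary citation page. A hedged sketch with placeholder values (the authority, identifier, and site URL below are invented for illustration):

    // Assuming :Protocol=perma, :Authority=DVN1, an empty separator, and a site URL of
    // https://demo.example.edu (all placeholder values):
    //
    //   stored identifier :  ABCDEF
    //   global id string  :  perma:DVN1ABCDEF
    //   landing URL       :  https://demo.example.edu/citation?persistentId=perma:DVN1ABCDEF
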
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java
new file mode 100644
index 00000000000..5bc855a9593
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidHelper.java
@@ -0,0 +1,43 @@
+package edu.harvard.iq.dataverse.pidproviders;
+
+import java.util.Arrays;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
+
+import edu.harvard.iq.dataverse.DOIDataCiteServiceBean;
+import edu.harvard.iq.dataverse.DOIEZIdServiceBean;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
+
+    /**
+     * This is a small helper bean.
+     * As it is a singleton and built at application start (=deployment), it will register the
+     * (stateless) PID provider service beans with PidUtil once they are ready.
+     */
+    @Startup
+    @Singleton
+    public class PidHelper {
+
+        @EJB
+        DOIDataCiteServiceBean datacitePidSvc;
+        @EJB
+        DOIEZIdServiceBean ezidPidSvc;
+        @EJB
+        HandlenetServiceBean handlePidSvc;
+        @EJB
+        FakePidProviderServiceBean fakePidSvc;
+        @EJB
+        PermaLinkPidProviderServiceBean permaPidSvc;
+        @EJB
+        UnmanagedDOIServiceBean unmanagedDOISvc;
+        @EJB
+        UnmanagedHandlenetServiceBean unmanagedHandleSvc;
+
+        @PostConstruct
+        public void listServices() {
+            PidUtil.addAllToProviderList(Arrays.asList(datacitePidSvc, ezidPidSvc, handlePidSvc, permaPidSvc, fakePidSvc));
+            PidUtil.addAllToUnmanagedProviderList(Arrays.asList(unmanagedDOISvc, unmanagedHandleSvc));
+        }
+
+    }
\ No newline at end of file
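Once PidHelper has populated the static provider lists at startup, callers can hand PidUtil an arbitrary PID string (via the parseAsGlobalID helper added in the next file) without knowing which provider owns it. A minimal sketch (the DOI below is the example already used elsewhere in this patch; it assumes the application has started so listServices() has run):

    // Managed providers are consulted first; the unmanaged DOI/Handle providers act as a
    // fallback so recognizable but locally unmanaged PIDs can still be parsed.
    GlobalId pid = PidUtil.parseAsGlobalID("doi:10.7910/DVN/TJCLKP");
    // An unparseable string raises IllegalArgumentException.
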
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java
index f9b451ecab2..78305648f67 100644
--- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java
@@ -1,49 +1,54 @@
 package edu.harvard.iq.dataverse.pidproviders;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.ProtocolException;
 import java.net.URL;
 import java.util.Arrays;
 import java.util.Base64;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.BadRequestException;
-import javax.ws.rs.InternalServerErrorException;
-import javax.ws.rs.NotFoundException;
-import javax.ws.rs.ServiceUnavailableException;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
+
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.InternalServerErrorException;
+import jakarta.ws.rs.NotFoundException;
+import jakarta.ws.rs.ServiceUnavailableException;
 
 public class PidUtil {
 
     private static final Logger logger = Logger.getLogger(PidUtil.class.getCanonicalName());
 
     /**
-     * @throws BadRequestException if user didn't supply a DOI.
+     * @throws BadRequestException          if user didn't supply a DOI.
      *
-     * @throws NotFoundException if DOI not found in DataCite.
+     * @throws NotFoundException            if DOI not found in DataCite.
      *
-     * @throws ServiceUnavailableException if non 200 or non 404 response from
-     * DataCite.
+     * @throws ServiceUnavailableException  if non 200 or non 404 response from
+     *                                      DataCite.
      *
      * @throws InternalServerErrorException on local misconfiguration such as
-     * DataCite hostname not in DNS.
+     *                                      DataCite hostname not in DNS.
      */
-    public static JsonObjectBuilder queryDoi(String persistentId, String baseUrl, String username, String password) {
+    public static JsonObjectBuilder queryDoi(GlobalId globalId, String baseUrl, String username, String password) {
         try {
             // This throws an exception if this is not a DOI, which is the only
             // user-supplied param - treat this as a BadRequest in the catch statement.
-            String doi = acceptOnlyDoi(persistentId);
+            String doi = acceptOnlyDoi(globalId);
             URL url;
-            // Other errors are all internal misconfiguration (any problems creating the URL), the
-            // DOI doesn't exist (404 from DataCite), or problem at DataCite (other non-200 responses).
+            // Other errors are all internal misconfiguration (any problems creating the
+            // URL), the
+            // DOI doesn't exist (404 from DataCite), or problem at DataCite (other non-200
+            // responses).
             int status = 0;
             HttpURLConnection connection = null;
             try {
@@ -63,17 +68,20 @@ public static JsonObjectBuilder queryDoi(String persistentId, String baseUrl, St
                         BundleUtil.getStringFromBundle("pids.datacite.errors.noResponseCode", Arrays.asList(baseUrl)));
             }
             if (status == 404) {
-                //Could check to see if Dataverse expects the DOI to be registered - that would result in a 404 from Dataverse before having to contact DataCite, and DataCite could still return a 404
-                throw new NotFoundException("404 (NOT FOUND) from DataCite for DOI " + persistentId);
+                // Could check to see if Dataverse expects the DOI to be registered - that would
+                // result in a 404 from Dataverse before having to contact DataCite, and
+                // DataCite could still return a 404
+                throw new NotFoundException("404 (NOT FOUND) from DataCite for DOI " + globalId);
             }
             if (status != 200) {
-                /* We could just send back whatever status code DataCite sends, but we've seen
+                /*
+                 * We could just send back whatever status code DataCite sends, but we've seen
                  * DataCite sometimes respond with 403 when the credentials were OK, and their
-                 * 500 error doesn't mean a problem with Dataverse, so wrapping any of them in
-                 * a 503 error, to indicate this is a temporary error, might be the better option. In any case, we need to log the
-                 * issue to be able to debug it.
+                 * 500 error doesn't mean a problem with Dataverse, so wrapping any of them in a
+                 * 503 error, to indicate this is a temporary error, might be the better option.
+                 * In any case, we need to log the issue to be able to debug it.
                  */
-                logger.severe("Received " + status + " error from DataCite for DOI: " + persistentId); 
+                logger.severe("Received " + status + " error from DataCite for DOI: " + globalId);
                 InputStream errorStream = connection.getErrorStream();
                 if (errorStream != null) {
                     JsonObject out = Json.createReader(connection.getErrorStream()).readObject();
@@ -104,11 +112,107 @@ public static JsonObjectBuilder queryDoi(String persistentId, String baseUrl, St
      * @param PID in the form doi:10.7910/DVN/TJCLKP
      * @return DOI in the form 10.7910/DVN/TJCLKP (no "doi:")
      */
-    private static String acceptOnlyDoi(String persistentId) {
-        GlobalId globalId = new GlobalId(persistentId);
-        if (!GlobalId.DOI_PROTOCOL.equals(globalId.getProtocol())) {
+    private static String acceptOnlyDoi(GlobalId globalId) {
+        if (!DOIServiceBean.DOI_PROTOCOL.equals(globalId.getProtocol())) {
             throw new IllegalArgumentException(BundleUtil.getStringFromBundle("pids.datacite.errors.DoiOnly"));
         }
         return globalId.getAuthority() + "/" + globalId.getIdentifier();
     }
+
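+    // Registered PID providers, keyed by provider name (the first entry of getProviderInformation()).
+    // The "unmanaged" providers are only used to recognize PIDs that no configured provider manages.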
+    static Map<String, GlobalIdServiceBean> providerMap = new HashMap<String, GlobalIdServiceBean>();
+    static Map<String, GlobalIdServiceBean> unmanagedProviderMap = new HashMap<String, GlobalIdServiceBean>();
+
+    public static void addAllToProviderList(List<GlobalIdServiceBean> list) {
+        for (GlobalIdServiceBean pidProvider : list) {
+            providerMap.put(pidProvider.getProviderInformation().get(0), pidProvider);
+        }
+    }
+
+    public static void addAllToUnmanagedProviderList(List<GlobalIdServiceBean> list) {
+        for (GlobalIdServiceBean pidProvider : list) {
+            unmanagedProviderMap.put(pidProvider.getProviderInformation().get(0), pidProvider);
+        }
+    }
+
+    /**
+     * Parses the given string as a persistent identifier using the registered PID providers.
+     * 
+     * @param identifier The string to be parsed
+     * @return The parsed GlobalId.
+     * @throws IllegalArgumentException if the passed string cannot be parsed.
+     */
+    public static GlobalId parseAsGlobalID(String identifier) {
+        logger.fine("In parseAsGlobalId: " + providerMap.size());
+        for (GlobalIdServiceBean pidProvider : providerMap.values()) {
+            logger.fine(" Checking " + String.join(",", pidProvider.getProviderInformation()));
+            GlobalId globalId = pidProvider.parsePersistentId(identifier);
+            if (globalId != null) {
+                return globalId;
+            }
+        }
+        // If no providers can manage this PID, at least allow it to be recognized
+        for (GlobalIdServiceBean pidProvider : unmanagedProviderMap.values()) {
+            logger.fine(" Checking " + String.join(",", pidProvider.getProviderInformation()));
+            GlobalId globalId = pidProvider.parsePersistentId(identifier);
+            if (globalId != null) {
+                return globalId;
+            }
+        }
+        throw new IllegalArgumentException("Failed to parse identifier: " + identifier);
+    }
+
+    /**
+     * Parses the given protocol, authority, and identifier using the registered PID providers.
+     * 
+     * @param protocol   The PID protocol (e.g. doi or hdl)
+     * @param authority  The PID authority
+     * @param identifier The identifier to be parsed
+     * @return The parsed GlobalId, or null if no provider recognizes it.
+     */
+    public static GlobalId parseAsGlobalID(String protocol, String authority, String identifier) {
+        logger.fine("Looking for " + protocol + " " + authority + " " + identifier);
+        logger.fine("In parseAsGlobalId: " + providerMap.size());
+        for (GlobalIdServiceBean pidProvider : providerMap.values()) {
+            logger.fine(" Checking " + String.join(",", pidProvider.getProviderInformation()));
+            GlobalId globalId = pidProvider.parsePersistentId(protocol, authority, identifier);
+            if (globalId != null) {
+                return globalId;
+            }
+        }
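+        // Fall back to the unmanaged providers so that PIDs not managed by this installation can still be recognized.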
+        for (GlobalIdServiceBean pidProvider : unmanagedProviderMap.values()) {
+            logger.fine(" Checking " + String.join(",", pidProvider.getProviderInformation()));
+            GlobalId globalId = pidProvider.parsePersistentId(protocol, authority, identifier);
+            if (globalId != null) {
+                return globalId;
+            }
+        }
+        // For unit tests which don't have any provider Beans - todo remove when
+        // providers are no longer beans and can be configured easily in tests
+        return parseUnmanagedDoiOrHandle(protocol, authority, identifier);
+        // throw new IllegalArgumentException("Failed to parse identifier from protocol:
+        // " + protocol + ", authority:" + authority + ", identifier: " + identifier);
+    }
+    /*
+     * This method should be deprecated/removed when further refactoring to support
+     * multiple PID providers is done. At that point, when the providers aren't
+     * beans, this code can be moved into other classes that go in the providerMap.
+     * If this method is not kept in sync with the DOIServiceBean and
+     * HandlenetServiceBean implementations, the tests using it won't be valid tests
+     * of the production code.
+     */
+
+    private static GlobalId parseUnmanagedDoiOrHandle(String protocol, String authority, String identifier) {
+        // Default recognition - could be moved to new classes in the future.
+        if (!GlobalIdServiceBean.isValidGlobalId(protocol, authority, identifier)) {
+            return null;
+        }
+        String urlPrefix = null;
+        switch (protocol) {
+        case DOIServiceBean.DOI_PROTOCOL:
+            if (!GlobalIdServiceBean.checkDOIAuthority(authority)) {
+                return null;
+            }
+            urlPrefix = DOIServiceBean.DOI_RESOLVER_URL;
+            break;
+        case HandlenetServiceBean.HDL_PROTOCOL:
+            urlPrefix = HandlenetServiceBean.HDL_RESOLVER_URL;
+            break;
+        }
+        return new GlobalId(protocol, authority, identifier, "/", urlPrefix, null);
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java
new file mode 100644
index 00000000000..f7e9372cc9b
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedDOIServiceBean.java
@@ -0,0 +1,83 @@
+package edu.harvard.iq.dataverse.pidproviders;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Logger;
+
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.Stateless;
+
+import org.apache.commons.httpclient.HttpException;
+import org.apache.commons.lang3.NotImplementedException;
+
+import edu.harvard.iq.dataverse.DOIServiceBean;
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.GlobalId;
+
+/**
+ * This class is only used to parse DOIs that are not managed by any account configured in Dataverse.
+ * It does not implement any of the methods related to PID CRUD.
+ */
+
+@Stateless
+public class UnmanagedDOIServiceBean extends DOIServiceBean {
+
+    private static final Logger logger = Logger.getLogger(UnmanagedDOIServiceBean.class.getCanonicalName());
+
+    @PostConstruct
+    private void init() {
+        // Always on
+        configured = true;
+    }
+
+    @Override
+    public boolean canManagePID() {
+        return false;
+    }
+
+    @Override
+    public boolean registerWhenPublished() {
+        return false;
+    }
+
+    @Override
+    public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public String createIdentifier(DvObject dvObject) throws Exception {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public Map<String, String> getIdentifierMetadata(DvObject dvObject) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public String modifyIdentifierTargetURL(DvObject dvObject) throws Exception {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public void deleteIdentifier(DvObject dvObject) throws IOException, HttpException {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public boolean publicizeIdentifier(DvObject dvObject) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public List<String> getProviderInformation() {
+        return List.of("UnmanagedDOIProvider", "");
+    }
+
+
+    // PID recognition
+    // Done by DOIServiceBean
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java
new file mode 100644
index 00000000000..c856c5363e0
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/UnmanagedHandlenetServiceBean.java
@@ -0,0 +1,105 @@
+package edu.harvard.iq.dataverse.pidproviders;
+
+import edu.harvard.iq.dataverse.AbstractGlobalIdServiceBean;
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
+import java.util.*;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import jakarta.ejb.Stateless;
+import org.apache.commons.lang3.NotImplementedException;
+
+/**
+ * This class is only used to parse Handles that are not managed by any account configured in Dataverse.
+ * It does not implement any of the methods related to PID CRUD.
+ */
+@Stateless
+public class UnmanagedHandlenetServiceBean extends AbstractGlobalIdServiceBean {
+
+    private static final Logger logger = Logger.getLogger(UnmanagedHandlenetServiceBean.class.getCanonicalName());
+
+    public UnmanagedHandlenetServiceBean() {
+        logger.log(Level.FINE, "Constructor");
+        configured = true;
+    }
+
+    @Override
+    public boolean canManagePID() {
+        return false;
+    }
+
+    @Override
+    public boolean registerWhenPublished() {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public boolean alreadyRegistered(GlobalId pid, boolean noProviderDefault) throws Exception {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public Map<String, String> getIdentifierMetadata(DvObject dvObject) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public String modifyIdentifierTargetURL(DvObject dvObject) throws Exception {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public void deleteIdentifier(DvObject dvObject) throws Exception {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public List<String> getProviderInformation() {
+        return List.of("UnmanagedHandle", "");
+    }
+
+    @Override
+    public String createIdentifier(DvObject dvObject) throws Throwable {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public boolean publicizeIdentifier(DvObject dvObject) {
+        throw new NotImplementedException();
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String pidString) {
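+        // Normalize handle resolver URLs to the hdl: protocol form before delegating to the generic parser.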
+        if (pidString.startsWith(HandlenetServiceBean.HDL_RESOLVER_URL)) {
+            pidString = pidString.replace(HandlenetServiceBean.HDL_RESOLVER_URL,
+                    (HandlenetServiceBean.HDL_PROTOCOL + ":"));
+        } else if (pidString.startsWith(HandlenetServiceBean.HTTP_HDL_RESOLVER_URL)) {
+            pidString = pidString.replace(HandlenetServiceBean.HTTP_HDL_RESOLVER_URL,
+                    (HandlenetServiceBean.HDL_PROTOCOL + ":"));
+        }
+        return super.parsePersistentId(pidString);
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String protocol, String identifierString) {
+        if (!HandlenetServiceBean.HDL_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        GlobalId globalId = super.parsePersistentId(protocol, identifierString);
+        return globalId;
+    }
+
+    @Override
+    public GlobalId parsePersistentId(String protocol, String authority, String identifier) {
+        if (!HandlenetServiceBean.HDL_PROTOCOL.equals(protocol)) {
+            return null;
+        }
+        return super.parsePersistentId(protocol, authority, identifier);
+    }
+
+    @Override
+    public String getUrlPrefix() {
+        return HandlenetServiceBean.HDL_RESOLVER_URL;
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
index b0658f10b34..9af4bb6af9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlPage.java
@@ -4,10 +4,10 @@
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import java.io.Serializable;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 
 /**
  * Backing bean for JSF page. Sets session to {@link PrivateUrlUser}. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
index efe64052c4a..9e5879106e4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlServiceBean.java
@@ -2,19 +2,20 @@
 
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
+import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.RoleAssignment;
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.Serializable;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  *
@@ -61,6 +62,13 @@ public PrivateUrlRedirectData getPrivateUrlRedirectDataFromToken(String token) {
         return PrivateUrlUtil.getPrivateUrlRedirectData(getRoleAssignmentFromPrivateUrlToken(token));
     }
 
+    /**
+     * @return DatasetVersion if it can be found using the token or null.
+     */
+    public DatasetVersion getDraftDatasetVersionFromToken(String token) {
+        return PrivateUrlUtil.getDraftDatasetVersionFromRoleAssignment(getRoleAssignmentFromPrivateUrlToken(token));
+    }
+
     /**
      * @return A RoleAssignment or null.
      *
diff --git a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtil.java b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtil.java
index c363139c912..e9d95e2faf4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtil.java
@@ -130,7 +130,7 @@ static String getDraftDatasetPageToBeRedirectedTo(RoleAssignment roleAssignment)
     static String getDraftUrl(DatasetVersion draft) {
         if (draft != null) {
             Dataset dataset = draft.getDataset();
-            if (dataset != null) {
+            if (dataset != null && dataset.getGlobalId()!=null) {
                 if ( dataset.getGlobalId().isComplete() ) {
                     String relativeUrl = "/dataset.xhtml?persistentId=" + dataset.getGlobalId().toString() + "&version=DRAFT";
                     return relativeUrl;
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java
index 65b97b80eea..ba5aba1b69b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvEntityFileDataConverter.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse.provenance;
 
-import javax.enterprise.inject.spi.CDI;
-import javax.faces.component.UIComponent;
-import javax.faces.context.FacesContext;
-import javax.faces.convert.Converter;
-import javax.faces.convert.FacesConverter;
-import javax.inject.Inject;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.convert.Converter;
+import jakarta.faces.convert.FacesConverter;
+import jakarta.inject.Inject;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java
index a17e77f2a9e..c2eecb90d9e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvInvestigator.java
@@ -5,12 +5,12 @@
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
-import edu.harvard.iq.dataverse.api.AbstractApiBean;
+
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 import org.everit.json.schema.Schema;
 import org.everit.json.schema.ValidationException;
 import org.everit.json.schema.loader.SchemaLoader;
diff --git a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
index a4b7cdf8d4e..6e8a512902a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/provenance/ProvPopupFragmentBean.java
@@ -24,19 +24,19 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
 import org.apache.commons.io.IOUtils;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.ExternalContext;
-import javax.faces.context.FacesContext;
-import javax.json.JsonObject;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.ExternalContext;
+import jakarta.faces.context.FacesContext;
+import jakarta.json.JsonObject;
 import org.primefaces.model.file.UploadedFile;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java
index 4d89a2842cd..c252d2e3330 100644
--- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerPage.java
@@ -6,10 +6,10 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.JsonArray;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
 
 @Stateless
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java
index ee52254d6f5..8501fba3ce0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtil.java
@@ -3,15 +3,15 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import edu.harvard.iq.dataverse.util.SystemConfig;
+
 import java.io.File;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
 
 public class RepositoryStorageAbstractionLayerUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
index ef37569ac54..bc92959a5ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
@@ -14,20 +14,16 @@
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import java.util.StringTokenizer;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.faces.view.ViewScoped;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.faces.view.ViewScoped;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
 
 import org.apache.commons.lang3.StringUtils;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java b/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java
index f8bd0ea5a10..f97c1641479 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/FacetLabel.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.search;
 
-import javax.inject.Named;
+import jakarta.inject.Named;
 
 @Named
 public class FacetLabel implements Comparable<FacetLabel>{
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java b/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java
index 98a882c13ca..d40d8c362af 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/Highlight.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.search;
 
 import java.util.List;
-import javax.inject.Named;
+import jakarta.inject.Named;
 
 @Named
 public class Highlight {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java
index a04ae934259..c9cf1cd9dc7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexAsync.java
@@ -5,9 +5,9 @@
 import java.util.Collection;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
 
 @Stateless
 public class IndexAsync {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java
index 34c145fa6e8..0eeb681514c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexBatchServiceBean.java
@@ -1,30 +1,27 @@
 package edu.harvard.iq.dataverse.search;
 
-import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetServiceBean;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DvObjectServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
-import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.Date;
 import java.util.List;
 import java.util.concurrent.Future;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.AsyncResult;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.ejb.AsyncResult;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 import org.apache.solr.client.solrj.SolrServerException;
 
 @Named
@@ -205,15 +202,9 @@ public Future<String> indexAllOrSubset(long numPartitions, long partitionId, boo
         int datasetFailureCount = 0;
         List<Long> datasetIds = datasetService.findAllOrSubsetOrderByFilesOwned(skipIndexed);
         for (Long id : datasetIds) {
-            try {
-                datasetIndexCount++;
-                logger.info("indexing dataset " + datasetIndexCount + " of " + datasetIds.size() + " (id=" + id + ")");
-                Future<String> result = indexService.indexDatasetInNewTransaction(id);
-            } catch (Exception e) {
-                //We want to keep running even after an exception so throw some more info into the log
-                datasetFailureCount++;
-                logger.info("FAILURE indexing dataset " + datasetIndexCount + " of " + datasetIds.size() + " (id=" + id + ") Exception info: " + e.getMessage());
-            }
+            datasetIndexCount++;
+            logger.info("indexing dataset " + datasetIndexCount + " of " + datasetIds.size() + " (id=" + id + ")");
+            indexService.indexDatasetInNewTransaction(id);
         }
         logger.info("done iterating through all datasets");
 
@@ -269,15 +260,9 @@ public void indexDataverseRecursively(Dataverse dataverse) {
         
         // index the Dataset children
         for (Long childId : datasetChildren) {
-            try {
-                datasetIndexCount++;
-                logger.info("indexing dataset " + datasetIndexCount + " of " + datasetChildren.size() + " (id=" + childId + ")");
-                indexService.indexDatasetInNewTransaction(childId);
-            } catch (Exception e) {
-                //We want to keep running even after an exception so throw some more info into the log
-                datasetFailureCount++;
-                logger.info("FAILURE indexing dataset " + datasetIndexCount + " of " + datasetChildren.size() + " (id=" + childId + ") Exception info: " + e.getMessage());
-            }
+            datasetIndexCount++;
+            logger.info("indexing dataset " + datasetIndexCount + " of " + datasetChildren.size() + " (id=" + childId + ")");
+            indexService.indexDatasetInNewTransaction(childId);
         }
         long end = System.currentTimeMillis();
         if (datasetFailureCount + dataverseFailureCount > 0){
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index e73cce8acbe..9e73c38a5d0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -1,28 +1,9 @@
 package edu.harvard.iq.dataverse.search;
 
-import edu.harvard.iq.dataverse.ControlledVocabularyValue;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataFileServiceBean;
-import edu.harvard.iq.dataverse.DataFileTag;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
-import edu.harvard.iq.dataverse.DatasetFieldConstant;
-import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
-import edu.harvard.iq.dataverse.DatasetFieldType;
-import edu.harvard.iq.dataverse.DatasetLinkingServiceBean;
-import edu.harvard.iq.dataverse.DatasetServiceBean;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseLinkingServiceBean;
-import edu.harvard.iq.dataverse.DataverseServiceBean;
-import edu.harvard.iq.dataverse.DvObject;
-import edu.harvard.iq.dataverse.DvObjectServiceBean;
-import edu.harvard.iq.dataverse.Embargo;
-import edu.harvard.iq.dataverse.FileMetadata;
-import edu.harvard.iq.dataverse.PermissionServiceBean;
+import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
+import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.DataAccessRequest;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
@@ -33,13 +14,13 @@
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import edu.harvard.iq.dataverse.util.StringUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.io.InputStream;
 import java.sql.Timestamp;
-import java.text.NumberFormat;
 import java.text.SimpleDateFormat;
 import java.time.LocalDate;
 import java.util.ArrayList;
@@ -52,25 +33,25 @@
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.Future;
 import java.util.function.Function;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import javax.ejb.AsyncResult;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.EJBException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import static javax.ejb.TransactionAttributeType.REQUIRES_NEW;
-import javax.inject.Named;
-import javax.json.JsonObject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
+import jakarta.ejb.AsyncResult;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import static jakarta.ejb.TransactionAttributeType.REQUIRES_NEW;
+import jakarta.inject.Named;
+import jakarta.json.JsonObject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -240,7 +221,7 @@ public Future<String> indexDataverse(Dataverse dataverse, boolean processPaths)
             solrInputDocument.addField(SearchFields.SOURCE, HARVESTED);
         } else { (this means that all dataverses are "local" - should this be removed? */
         solrInputDocument.addField(SearchFields.IS_HARVESTED, false);
-        solrInputDocument.addField(SearchFields.METADATA_SOURCE, findRootDataverseCached().getName()); //rootDataverseName);
+        solrInputDocument.addField(SearchFields.METADATA_SOURCE, rootDataverse.getName()); //rootDataverseName);
         /*}*/
 
         addDataverseReleaseDateToSolrDoc(solrInputDocument, dataverse);
@@ -347,48 +328,99 @@ public Future<String> indexDataverse(Dataverse dataverse, boolean processPaths)
     }
     
     @TransactionAttribute(REQUIRES_NEW)
-    public Future<String> indexDatasetInNewTransaction(Long datasetId) throws  SolrServerException, IOException{ //Dataset dataset) {
+    public void indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) {
         boolean doNormalSolrDocCleanUp = false;
-        Dataset dataset = em.find(Dataset.class, datasetId);
-        // return indexDataset(dataset, doNormalSolrDocCleanUp);
-        Future<String> ret = indexDataset(dataset, doNormalSolrDocCleanUp);
+        Dataset dataset = datasetService.findDeep(datasetId);
+        asyncIndexDataset(dataset, doNormalSolrDocCleanUp);
         dataset = null;
-        return ret;
     }
     
-    @TransactionAttribute(REQUIRES_NEW)
-    public Future<String> indexDatasetObjectInNewTransaction(Dataset dataset) throws  SolrServerException, IOException{ //Dataset dataset) {
-        boolean doNormalSolrDocCleanUp = false;
-        // return indexDataset(dataset, doNormalSolrDocCleanUp);
-        Future<String> ret = indexDataset(dataset, doNormalSolrDocCleanUp);
-        dataset = null;
-        return ret;
+    // The following two variables are only used in the synchronized getNextToIndex method and do not need to be synchronized themselves
+
+    // NEXT_TO_INDEX contains datasets mapped by dataset id that were added for future indexing while indexing was already ongoing for a given dataset
+    // (if there already was a dataset scheduled for indexing, it is overwritten and only the most recently requested version is kept in the map)
+    private static final Map<Long, Dataset> NEXT_TO_INDEX = new ConcurrentHashMap<>();
+    // INDEXING_NOW holds the ids of datasets being indexed asynchronously right now (used as a set)
+    private static final Map<Long, Boolean> INDEXING_NOW = new ConcurrentHashMap<>();
+
+    // When you pass null as Dataset parameter to this method, it indicates that the indexing of the dataset with "id" has finished
+    // Pass non-null Dataset to schedule it for indexing
+    synchronized private static Dataset getNextToIndex(Long id, Dataset d) {
+        if (d == null) { // -> indexing of the dataset with id has finished
+            Dataset next = NEXT_TO_INDEX.remove(id);
+            if (next == null) { // -> no new indexing jobs were requested while indexing was ongoing
+                // the job can be stopped now
+                INDEXING_NOW.remove(id);
+            }
+            return next;
+        }
+        // index job is requested for a non-null dataset
+        if (INDEXING_NOW.containsKey(id)) { // -> indexing job is already ongoing, and a new job should not be started by the current thread -> return null
+            NEXT_TO_INDEX.put(id, d);
+            return null;
+        }
+        // otherwise, start a new job
+        INDEXING_NOW.put(id, true);
+        return d;
     }
 
+    /**
+     * Indexes a dataset asynchronously.
+     * 
+     * Note that this method implements a synchronized skipping mechanism. When an
+     * indexing job is already running for a given dataset in the background, the
+     * new call will not index that dataset, but will delegate the execution to
+     * the already running job. The running job will pick up the requested indexing
+     * once it is finished with the ongoing indexing. If another indexing is
+     * requested before the ongoing indexing is finished, only the indexing that is
+     * requested most recently will be picked up for the next indexing.
+     * 
+     * In other words: we can have at most one indexing ongoing for the given
+     * dataset, and at most one (most recent) request for reindexing of the same
+     * dataset. All requests that come between the most recent one and the ongoing
+     * one are skipped for optimization reasons. For a more in-depth discussion,
+     * see the pull request: https://github.com/IQSS/dataverse/pull/9558
+     * 
+     * @param dataset                The dataset to be indexed.
+     * @param doNormalSolrDocCleanUp Flag for normal Solr doc clean up.
+     */
     @Asynchronous
-    public Future<String> asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws  SolrServerException, IOException {
-        return indexDataset(dataset, doNormalSolrDocCleanUp);
+    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
+        Long id = dataset.getId();
+        Dataset next = getNextToIndex(id, dataset); // if there is an ongoing index job for this dataset, next is null (ongoing index job will reindex the newest version after current indexing finishes)
+        while (next != null) {
+            try {
+                indexDataset(next, doNormalSolrDocCleanUp);
+            } catch (Exception e) { // catch all possible exceptions; otherwise, when something unexpected happens, the dataset would remain locked and impossible to reindex
+                String failureLogText = "Indexing failed. You can kick off a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
+                failureLogText += "\r\n" + e.getLocalizedMessage();
+                LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
+            }
+            next = getNextToIndex(id, null); // if dataset was not changed during the indexing (and no new job was requested), next is null and loop can be stopped
+        }
     }
-    
+
     @Asynchronous
-    public void asyncIndexDatasetList(List<Dataset> datasets, boolean doNormalSolrDocCleanUp) throws  SolrServerException, IOException {
+    public void asyncIndexDatasetList(List<Dataset> datasets, boolean doNormalSolrDocCleanUp) {
         for(Dataset dataset : datasets) {
-            indexDataset(dataset, true);
+            asyncIndexDataset(dataset, true);
         }
     }
     
-    public Future<String> indexDvObject(DvObject objectIn) throws  SolrServerException, IOException {
-        
+    public void indexDvObject(DvObject objectIn) throws  SolrServerException, IOException {
         if (objectIn.isInstanceofDataset() ){
-            return (indexDataset((Dataset)objectIn, true));
-        }
-        if (objectIn.isInstanceofDataverse() ){
-            return (indexDataverse((Dataverse)objectIn));
+            asyncIndexDataset((Dataset)objectIn, true);
+        } else if (objectIn.isInstanceofDataverse() ){
+            indexDataverse((Dataverse)objectIn);
         }
-        return null;
+    }
+
+    private void indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws  SolrServerException, IOException {
+        doIndexDataset(dataset, doNormalSolrDocCleanUp);
+        updateLastIndexedTime(dataset.getId());
     }
     
-    public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws  SolrServerException, IOException {
+    private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws  SolrServerException, IOException {
         logger.fine("indexing dataset " + dataset.getId());
         /**
          * @todo should we use solrDocIdentifierDataset or
@@ -547,7 +579,6 @@ public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocClean
                 String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
                 logger.fine(result);
                 indexDatasetPermissions(dataset);
-                return new AsyncResult<>(result);
             } else if (latestVersionState.equals(DatasetVersion.VersionState.DEACCESSIONED)) {
 
                 desiredCards.put(DatasetVersion.VersionState.DEACCESSIONED, true);
@@ -594,11 +625,9 @@ public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocClean
                 String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
                 logger.fine(result);
                 indexDatasetPermissions(dataset);
-                return new AsyncResult<>(result);
             } else {
                 String result = "No-op. Unexpected condition reached: No released version and latest version is neither draft nor deaccessioned";
                 logger.fine(result);
-                return new AsyncResult<>(result);
             }
         } else if (atLeastOnePublishedVersion == true) {
             results.append("Published versions found. ")
@@ -651,7 +680,6 @@ public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocClean
                 String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
                 logger.fine(result);
                 indexDatasetPermissions(dataset);
-                return new AsyncResult<>(result);
             } else if (latestVersionState.equals(DatasetVersion.VersionState.DRAFT)) {
 
                 IndexableDataset indexableDraftVersion = new IndexableDataset(latestVersion);
@@ -705,16 +733,13 @@ public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocClean
                 String result = getDesiredCardState(desiredCards) + results.toString() + debug.toString();
                 logger.fine(result);
                 indexDatasetPermissions(dataset);
-                return new AsyncResult<>(result);
             } else {
                 String result = "No-op. Unexpected condition reached: There is at least one published version but the latest version is neither published nor draft";
                 logger.fine(result);
-                return new AsyncResult<>(result);
             }
         } else {
             String result = "No-op. Unexpected condition reached: Has a version been published or not?";
             logger.fine(result);
-            return new AsyncResult<>(result);
         }
     }
     
@@ -740,10 +765,11 @@ private IndexResponse indexDatasetPermissions(Dataset dataset) {
     }
 
     private String addOrUpdateDataset(IndexableDataset indexableDataset) throws  SolrServerException, IOException {
-        return addOrUpdateDataset(indexableDataset, null);
+        String result = addOrUpdateDataset(indexableDataset, null);
+        return result;
     }
 
-    public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long> datafilesInDraftVersion) throws  SolrServerException, IOException {        
+    public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long> datafilesInDraftVersion) throws  SolrServerException, IOException {
         IndexableDataset.DatasetState state = indexableDataset.getDatasetState();
         Dataset dataset = indexableDataset.getDatasetVersion().getDataset();
         logger.fine("adding or updating Solr document for dataset id " + dataset.getId());
@@ -758,11 +784,27 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
         solrInputDocument.addField(SearchFields.DATASET_PERSISTENT_ID, dataset.getGlobalId().toString());
         solrInputDocument.addField(SearchFields.PERSISTENT_URL, dataset.getPersistentURL());
         solrInputDocument.addField(SearchFields.TYPE, "datasets");
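+        // Determine metadata validity: published versions count as valid; drafts are validated on a cloned copy with initialized fields.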
+        boolean valid;
+        if (!indexableDataset.getDatasetVersion().isDraft()) {
+            valid = true;
+        } else {
+            DatasetVersion version = indexableDataset.getDatasetVersion().cloneDatasetVersion();
+            version.setDatasetFields(version.initDatasetFields());
+            valid = version.isValid();
+        }
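+        // The validity flag is only added to the Solr doc when JvmSettings.API_ALLOW_INCOMPLETE_METADATA is enabled.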
+        if (JvmSettings.API_ALLOW_INCOMPLETE_METADATA.lookupOptional(Boolean.class).orElse(false)) {
+            solrInputDocument.addField(SearchFields.DATASET_VALID, valid);
+        }
 
+        final Dataverse dataverse = dataset.getDataverseContext();
+        final String dvIndexableCategoryName = dataverse.getIndexableCategoryName();
+        final String dvAlias = dataverse.getAlias();
+        final String dvDisplayName = dataverse.getDisplayName();
+        final String rdvName = findRootDataverseCached().getName();
         //This only grabs the immediate parent dataverse's category. We do the same for dataverses themselves.
-        solrInputDocument.addField(SearchFields.CATEGORY_OF_DATAVERSE, dataset.getDataverseContext().getIndexableCategoryName());
-        solrInputDocument.addField(SearchFields.IDENTIFIER_OF_DATAVERSE, dataset.getDataverseContext().getAlias());
-        solrInputDocument.addField(SearchFields.DATAVERSE_NAME, dataset.getDataverseContext().getDisplayName());
+        solrInputDocument.addField(SearchFields.CATEGORY_OF_DATAVERSE, dvIndexableCategoryName);
+        solrInputDocument.addField(SearchFields.IDENTIFIER_OF_DATAVERSE, dvAlias);
+        solrInputDocument.addField(SearchFields.DATAVERSE_NAME, dvDisplayName);
         
         Date datasetSortByDate = new Date();
         Date majorVersionReleaseDate = dataset.getMostRecentMajorVersionReleaseDate();
@@ -808,7 +850,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
             solrInputDocument.addField(SearchFields.METADATA_SOURCE, HARVESTED);
         } else {
             solrInputDocument.addField(SearchFields.IS_HARVESTED, false);
-            solrInputDocument.addField(SearchFields.METADATA_SOURCE, findRootDataverseCached().getName()); //rootDataverseName);
+            solrInputDocument.addField(SearchFields.METADATA_SOURCE, rdvName); //rootDataverseName);
         }
 
         DatasetVersion datasetVersion = indexableDataset.getDatasetVersion();
@@ -827,7 +869,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
             }
 
             Set<String> langs = settingsService.getConfiguredLanguages();
-            Map<Long, JsonObject> cvocMap = datasetFieldService.getCVocConf(false);
+            Map<Long, JsonObject> cvocMap = datasetFieldService.getCVocConf(true);
             Set<String> metadataBlocksWithValue = new HashSet<>();
             for (DatasetField dsf : datasetVersion.getFlatDatasetFields()) {
 
@@ -1011,13 +1053,17 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
                             if(maxNorthLat==null || Float.parseFloat(maxNorthLat) < Float.parseFloat(northLat)) {
                                 maxNorthLat=northLat;
                             }
-                            //W, E, N, S
-                            solrInputDocument.addField(SearchFields.GEOLOCATION, "ENVELOPE(" + westLon + "," + eastLon + "," + northLat + "," + southLat + ")");
+
+                            if (DatasetFieldValueValidator.validateBoundingBox(westLon, eastLon, northLat, southLat)) {
+                                //W, E, N, S
+                                solrInputDocument.addField(SearchFields.GEOLOCATION, "ENVELOPE(" + westLon + "," + eastLon + "," + northLat + "," + southLat + ")");
+                            }
                         }
                     }
                     //Only one bbox per dataset
                     //W, E, N, S
-                    if ((minWestLon != null || maxEastLon != null) && (maxNorthLat != null || minSouthLat != null)) {
+                    if (DatasetFieldValueValidator.validateBoundingBox(minWestLon, maxEastLon, maxNorthLat, minSouthLat) &&
+                            (minWestLon != null || maxEastLon != null) && (maxNorthLat != null || minSouthLat != null)) {
                         solrInputDocument.addField(SearchFields.BOUNDING_BOX, "ENVELOPE(" + minWestLon + "," + maxEastLon + "," + maxNorthLat + "," + minSouthLat + ")");
                     }
 
@@ -1069,6 +1115,9 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
             }
             LocalDate embargoEndDate=null;
             LocalDate end = null;
+            final String datasetCitation = dataset.getCitation();
+            final Long datasetId = dataset.getId();
+            final String datasetGlobalId = dataset.getGlobalId().toString();
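+            // These per-dataset values are computed once here rather than recomputed for every file in the loop below.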
             for (FileMetadata fileMetadata : fileMetadatas) {
                
                 Embargo emb= fileMetadata.getDataFile().getEmbargo();
@@ -1106,7 +1155,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
                     datafileSolrInputDocument.addField(SearchFields.IDENTIFIER, fileEntityId);
                     datafileSolrInputDocument.addField(SearchFields.PERSISTENT_URL, dataset.getPersistentURL());
                     datafileSolrInputDocument.addField(SearchFields.TYPE, "files");
-                    datafileSolrInputDocument.addField(SearchFields.CATEGORY_OF_DATAVERSE, dataset.getDataverseContext().getIndexableCategoryName());
+                    datafileSolrInputDocument.addField(SearchFields.CATEGORY_OF_DATAVERSE, dvIndexableCategoryName);
                     if(end!=null) {
                         datafileSolrInputDocument.addField(SearchFields.EMBARGO_END_DATE, end.toEpochDay()); 
                     }
@@ -1148,12 +1197,15 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
                                 logger.warning(String.format("Full-text indexing for %s failed",
                                         fileMetadata.getDataFile().getDisplayName()));
                                 e.printStackTrace();
-                                continue;
                             } catch (OutOfMemoryError e) {
                                 textHandler = null;
                                 logger.warning(String.format("Full-text indexing for %s failed due to OutOfMemoryError",
                                         fileMetadata.getDataFile().getDisplayName()));
-                                continue;
+                            } catch(Error e) {
+                                //Catch everything - full-text indexing is complex enough (and using enough 3rd party components) that it can fail
+                                // and we don't want problems here to break other Dataverse functionality (e.g. edits)
+                                logger.severe(String.format("Full-text indexing for %s failed due to Error: %s : %s",
+                                        fileMetadata.getDataFile().getDisplayName(),e.getClass().getCanonicalName(), e.getLocalizedMessage()));
                             } finally {
                                 IOUtils.closeQuietly(instream);
                             }
@@ -1238,7 +1290,7 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
                             datafileSolrInputDocument.addField(SearchFields.METADATA_SOURCE, HARVESTED);
                         } else {
                             datafileSolrInputDocument.addField(SearchFields.IS_HARVESTED, false);
-                            datafileSolrInputDocument.addField(SearchFields.METADATA_SOURCE, findRootDataverseCached().getName());
+                            datafileSolrInputDocument.addField(SearchFields.METADATA_SOURCE, rdvName);
                         }
                     }
                     if (fileSortByDate == null) {
@@ -1296,16 +1348,18 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set<Long
                     datafileSolrInputDocument.addField(SearchFields.FILE_CHECKSUM_VALUE, fileMetadata.getDataFile().getChecksumValue());
                     datafileSolrInputDocument.addField(SearchFields.DESCRIPTION, fileMetadata.getDescription());
                     datafileSolrInputDocument.addField(SearchFields.FILE_DESCRIPTION, fileMetadata.getDescription());
-                    datafileSolrInputDocument.addField(SearchFields.FILE_PERSISTENT_ID, fileMetadata.getDataFile().getGlobalId().toString());
+                    GlobalId filePid = fileMetadata.getDataFile().getGlobalId();
+                    datafileSolrInputDocument.addField(SearchFields.FILE_PERSISTENT_ID,
+                            (filePid != null) ? filePid.toString() : null);
                     datafileSolrInputDocument.addField(SearchFields.UNF, fileMetadata.getDataFile().getUnf());
                     datafileSolrInputDocument.addField(SearchFields.SUBTREE, dataversePaths);
                     // datafileSolrInputDocument.addField(SearchFields.HOST_DATAVERSE,
                     // dataFile.getOwner().getOwner().getName());
                     // datafileSolrInputDocument.addField(SearchFields.PARENT_NAME,
                     // dataFile.getDataset().getTitle());
-                    datafileSolrInputDocument.addField(SearchFields.PARENT_ID, fileMetadata.getDataFile().getOwner().getId());
-                    datafileSolrInputDocument.addField(SearchFields.PARENT_IDENTIFIER, fileMetadata.getDataFile().getOwner().getGlobalId().toString());
-                    datafileSolrInputDocument.addField(SearchFields.PARENT_CITATION, fileMetadata.getDataFile().getOwner().getCitation());
+                    datafileSolrInputDocument.addField(SearchFields.PARENT_ID, datasetId);
+                    datafileSolrInputDocument.addField(SearchFields.PARENT_IDENTIFIER, datasetGlobalId);
+                    datafileSolrInputDocument.addField(SearchFields.PARENT_CITATION, datasetCitation);
 
                     datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, parentDatasetTitle);
 
@@ -1402,17 +1456,27 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set<Long> d
                 throw new IOException(ex);
             }
         }
+        return docs.getMessage();
+    }
+
+    @Asynchronous
+    private void updateLastIndexedTime(Long id) {
+        // indexing is often in a transaction with update statements
+        // if we flush on query (flush-mode auto), we want to prevent locking
+        // -> update the dataset asynchronously in a new transaction
+        updateLastIndexedTimeInNewTransaction(id);
+    }
+
+    @TransactionAttribute(REQUIRES_NEW)
+    private void updateLastIndexedTimeInNewTransaction(Long id) {
         /// Dataset updatedDataset =
         /// (Dataset)dvObjectService.updateContentIndexTime(dataset);
         /// updatedDataset = null;
         // instead of making a call to dvObjectService, let's try and
         // modify the index time stamp using the local EntityManager:
-        DvObject dvObjectToModify = em.find(DvObject.class, docs.getDatasetId());
+        DvObject dvObjectToModify = em.find(DvObject.class, id);
         dvObjectToModify.setIndexTime(new Timestamp(new Date().getTime()));
         dvObjectToModify = em.merge(dvObjectToModify);
-        dvObjectToModify = null;
-
-        return docs.getMessage();
     }
 
     /**
@@ -1616,7 +1680,11 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc
                 sid.addField(fieldName, doc.getFieldValue(fieldName));
             }
 
-            List<String> paths =  object.isInstanceofDataset() ? retrieveDVOPaths(datasetService.find(object.getId())) 
+            Dataset dataset = null;
+            if (object.isInstanceofDataset()) {
+                dataset = datasetService.findDeep(object.getId());
+            }
+            List<String> paths = object.isInstanceofDataset() ? retrieveDVOPaths(dataset)
                     : retrieveDVOPaths(dataverseService.find(object.getId()));
 
             sid.removeField(SearchFields.SUBTREE);
@@ -1624,7 +1692,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc
             UpdateResponse addResponse = solrClientService.getSolrClient().add(sid);
             UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
             if (object.isInstanceofDataset()) {
-                for (DataFile df : datasetService.find(object.getId()).getFiles()) {
+                for (DataFile df : dataset.getFiles()) {
                     solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString()));
                     res = solrClientService.getSolrClient().query(solrQuery);
                     if (!res.getResults().isEmpty()) {
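A hypothetical call site for the timestamp refactoring above (a sketch, not taken from this patch; datasetId is assumed to be in scope): the last-indexed time is no longer written inline at the end of addOrUpdateDataset, but through the asynchronous, REQUIRES_NEW pair of methods added above, so a query-triggered flush cannot end up locking the dataset row during indexing.

    // Hypothetical call site (sketch; datasetId assumed in scope): once the
    // Solr documents for a dataset have been added and committed, record the
    // index time. The call returns right away; the timestamp is written
    // asynchronously in its own transaction, per the two methods above.
    updateLastIndexedTime(datasetId);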
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
index f3d5f85121d..8fb7c161517 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
@@ -268,9 +268,12 @@ more targeted results for just datasets. The format is YYYY (i.e.
     public static final String FULL_TEXT = "_text_";
     public static final String EMBARGO_END_DATE = "embargoEndDate";
 
+    
     // SpatialRecursivePrefixTreeFieldType: https://solr.apache.org/guide/8_11/spatial-search.html#rpt
     public static final String GEOLOCATION = "geolocation";
     // BBoxField (bounding box): https://solr.apache.org/guide/8_11/spatial-search.html#bboxfield
     public static final String BOUNDING_BOX = "boundingBox";
 
+    public static final String DATASET_VALID = "datasetValid";
+
 }
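A minimal SolrJ sketch of how the new datasetValid field might be queried, for instance to list datasets currently indexed as failing validation. The SolrClient instance, the boolean indexing of the field, and the wrapper method are assumptions for illustration, not part of this change:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.response.QueryResponse;

    // Sketch: ask Solr for dataset cards whose datasetValid flag is false.
    static QueryResponse findInvalidDatasets(SolrClient solrClient) throws Exception {
        SolrQuery q = new SolrQuery("*:*");
        q.addFilterQuery(SearchFields.TYPE + ":datasets");        // dataset cards only
        q.addFilterQuery(SearchFields.DATASET_VALID + ":false");  // the new field above
        q.setRows(10);
        return solrClient.query(q);
    }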
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java
index 2bf8807e301..8caee7d16b4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBean.java
@@ -7,11 +7,11 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.faces.context.FacesContext;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 
 @Named
 @Stateless
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
index 2b40347828a..5a5d8781726 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchIncludeFragment.java
@@ -22,6 +22,7 @@
 import edu.harvard.iq.dataverse.ThumbnailServiceWrapper;
 import edu.harvard.iq.dataverse.WidgetWrapper;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import java.time.LocalDate;
 import java.util.ArrayList;
@@ -35,12 +36,12 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.enterprise.context.RequestScoped;
-import javax.faces.context.FacesContext;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.RequestScoped;
+import jakarta.faces.context.FacesContext;
+import jakarta.inject.Inject;
+import jakarta.inject.Named;
+import jakarta.servlet.http.HttpServletRequest;
 import org.apache.commons.lang3.StringUtils;
 
 
@@ -120,7 +121,6 @@ public class SearchIncludeFragment implements java.io.Serializable {
     private Long facetCountDatasets = 0L;
     private Long facetCountFiles = 0L;
     Map<String, Long> previewCountbyType = new HashMap<>();
-    private SolrQueryResponse solrQueryResponseAllTypes;
     private String sortField;
     private SortOrder sortOrder;
     private String currentSort;
@@ -132,6 +132,7 @@ public class SearchIncludeFragment implements java.io.Serializable {
     Map<String, String> datasetfieldFriendlyNamesBySolrField = new HashMap<>();
     Map<String, String> staticSolrFieldFriendlyNamesBySolrField = new HashMap<>();
     private boolean solrIsDown = false;
+    private boolean solrIsTemporarilyUnavailable = false; 
     private Map<String, Integer> numberOfFacets = new HashMap<>();
 //    private boolean showUnpublished;
     List<String> filterQueriesDebug = new ArrayList<>();
@@ -213,7 +214,7 @@ public String searchRedirect(String dataverseRedirectPage, Dataverse dataverseIn
             qParam = "&q=" + query;
         }
 
-        return widgetWrapper.wrapURL(dataverseRedirectPage + "?faces-redirect=true&q=" + qParam + optionalDataverseScope);
+        return widgetWrapper.wrapURL(dataverseRedirectPage + "?faces-redirect=true" + qParam + optionalDataverseScope);
 
     }
 
@@ -279,8 +280,9 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
 
 
         SolrQueryResponse solrQueryResponse = null;
+        SolrQueryResponse solrQueryResponseSecondPass = null;
 
-        List<String> filterQueriesFinal = new ArrayList<>();
+        List<String> filterQueriesExtended = new ArrayList<>();
         
         if (dataverseAlias != null) {
             this.dataverse = dataverseService.findByAlias(dataverseAlias);
@@ -294,7 +296,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
                  * @todo centralize this into SearchServiceBean
                  */
                 if (!isfilterQueryAlreadyInMap(filterDownToSubtree)){
-                    filterQueriesFinal.add(filterDownToSubtree);
+                    filterQueriesExtended.add(filterDownToSubtree);
                 }
 //                this.dataverseSubtreeContext = dataversePath;
             } else {
@@ -307,22 +309,23 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
             this.setRootDv(true);
         }
 
+        filterQueriesExtended.addAll(filterQueries);
+
+        /**
+         * Add type queries, for the types (Dataverses, Datasets, Datafiles) 
+         * currently selected:
+         */
         selectedTypesList = new ArrayList<>();
         String[] parts = selectedTypesString.split(":");
         selectedTypesList.addAll(Arrays.asList(parts));
-
-        List<String> filterQueriesFinalAllTypes = new ArrayList<>();
+                
         String[] arr = selectedTypesList.toArray(new String[selectedTypesList.size()]);
         selectedTypesHumanReadable = combine(arr, " OR ");
         if (!selectedTypesHumanReadable.isEmpty()) {
             typeFilterQuery = SearchFields.TYPE + ":(" + selectedTypesHumanReadable + ")";
-        }
-        
-        filterQueriesFinal.addAll(filterQueries);
-        filterQueriesFinalAllTypes.addAll(filterQueriesFinal); 
-
-        String allTypesFilterQuery = SearchFields.TYPE + ":(dataverses OR datasets OR files)";
-        filterQueriesFinalAllTypes.add(allTypesFilterQuery);
+        } 
+        List<String> filterQueriesFinal = new ArrayList<>();
+        filterQueriesFinal.addAll(filterQueriesExtended);
         filterQueriesFinal.add(typeFilterQuery);
 
         if (page <= 1) {
@@ -344,6 +347,7 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
         try {
             logger.fine("ATTENTION! query from user:   " + query);
             logger.fine("ATTENTION! queryToPassToSolr: " + queryToPassToSolr);
+            logger.fine("ATTENTION! filterQueriesFinal: " + filterQueriesFinal.toString());
             logger.fine("ATTENTION! sort by: " + sortField);
 
             /**
@@ -355,18 +359,79 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
             DataverseRequest dataverseRequest = new DataverseRequest(session.getUser(), httpServletRequest);
             List<Dataverse> dataverses = new ArrayList<>();
             dataverses.add(dataverse);
-            solrQueryResponse = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null);
+            solrQueryResponse = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null, !isFacetsDisabled(), true);
             if (solrQueryResponse.hasError()){
                 logger.info(solrQueryResponse.getError());
                 setSolrErrorEncountered(true);
+            } 
+            // Solr "temporarily unavailable" is the condition triggered by
+            // receiving a 503 from the search engine, which is in turn a result
+            // of one of the Solr "circuit breakers" being triggered by excessive
+            // load. We treat this condition as distinct from "Solr is down",
+            // on the assumption that it is transient.
+            if (solrQueryResponse.isSolrTemporarilyUnavailable()) {
+                setSolrTemporarilyUnavailable(true);
             }
             // This 2nd search() is for populating the "type" ("dataverse", "dataset", "file") facets: -- L.A. 
             // (why exactly do we need it, again?)
             // To get the counts we display in the types facets particulary for unselected types - SEK 08/25/2021
-            solrQueryResponseAllTypes = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalAllTypes, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false, null, null);
-            if (solrQueryResponse.hasError()){
-                logger.info(solrQueryResponse.getError());
-                setSolrErrorEncountered(true);
+            // Sure, but we should not waste resources here. We will try to save 
+            // solr some extra work and a) only run this second query IF there is 
+            // one or more unselected type facets; and b) drop all the extra 
+            // parameters from this second query - such as facets and highlights -
+            // that we do not actually need for the purposes of finding these 
+            // extra numbers. -- L.A. 10/16/2023
+            
+            // populate preview counts: https://redmine.hmdc.harvard.edu/issues/3560
+            previewCountbyType.put(BundleUtil.getStringFromBundle("dataverses"), 0L);
+            previewCountbyType.put(BundleUtil.getStringFromBundle("datasets"), 0L);
+            previewCountbyType.put(BundleUtil.getStringFromBundle("files"), 0L);
+            
+            
+            // This will populate the type facet counts for the types that are 
+            // currently selected on the collection page:
+            for (FacetCategory facetCategory : solrQueryResponse.getTypeFacetCategories()) {
+                for (FacetLabel facetLabel : facetCategory.getFacetLabel()) {
+                    previewCountbyType.put(facetLabel.getName(), facetLabel.getCount());
+                }
+            }
+            
+            if (!wasSolrErrorEncountered() && selectedTypesList.size() < 3 && !isSolrTemporarilyUnavailable() && !isFacetsDisabled()) {
+                // If some types are NOT currently selected, we will need to 
+                // run a second search to obtain the numbers of the unselected types:
+                
+                List<String> filterQueriesFinalSecondPass = new ArrayList<>();
+                filterQueriesFinalSecondPass.addAll(filterQueriesExtended);
+                   
+                arr = new String[3];
+                int c = 0;
+                for (String dvObjectType : Arrays.asList("dataverses", "datasets", "files")) {
+                    if (!selectedTypesList.contains(dvObjectType)) {
+                        arr[c++] = dvObjectType;
+                    }
+                }
+                filterQueriesFinalSecondPass.add(SearchFields.TYPE + ":(" + combine(arr, " OR ", c) + ")");
+                logger.fine("second pass query: " + queryToPassToSolr);
+                logger.fine("second pass filter query: "+filterQueriesFinalSecondPass.toString());
+
+                solrQueryResponseSecondPass = searchService.search(dataverseRequest, dataverses, queryToPassToSolr, filterQueriesFinalSecondPass, null, sortOrder.toString(), 0, onlyDataRelatedToMe, 1, false, null, null, false, false);
+
+                if (solrQueryResponseSecondPass != null) {
+
+                    if (solrQueryResponseSecondPass.hasError()) {
+                        logger.fine(solrQueryResponseSecondPass.getError());
+                        setSolrErrorEncountered(true);
+                    }
+
+                    // And now populate the remaining type facets:
+                    for (FacetCategory facetCategory : solrQueryResponseSecondPass.getTypeFacetCategories()) {
+                        for (FacetLabel facetLabel : facetCategory.getFacetLabel()) {
+                            previewCountbyType.put(facetLabel.getName(), facetLabel.getCount());
+                        }
+                    }
+                } else {
+                    logger.warning("null solr response from the second pass type query");
+                }
             }
             
         } catch (SearchException ex) {
@@ -446,17 +511,6 @@ The real issue here (https://github.com/IQSS/dataverse/issues/7304) is caused
                 }
             }
 
-            // populate preview counts: https://redmine.hmdc.harvard.edu/issues/3560
-            previewCountbyType.put(BundleUtil.getStringFromBundle("dataverses"), 0L);
-            previewCountbyType.put(BundleUtil.getStringFromBundle("datasets"), 0L);
-            previewCountbyType.put(BundleUtil.getStringFromBundle("files"), 0L);
-            if (solrQueryResponseAllTypes != null) {
-                for (FacetCategory facetCategory : solrQueryResponseAllTypes.getTypeFacetCategories()) {
-                    for (FacetLabel facetLabel : facetCategory.getFacetLabel()) {
-                        previewCountbyType.put(facetLabel.getName(), facetLabel.getCount());
-                    }
-                }
-            }
             
             setDisplayCardValues();
             
@@ -606,6 +660,10 @@ public void incrementFacets(String name, int incrementNum) {
     // http://stackoverflow.com/questions/1515437/java-function-for-arrays-like-phps-join/1515548#1515548
     String combine(String[] s, String glue) {
         int k = s.length;
+        return combine(s, glue, k);
+    }
+    
+    String combine(String[] s, String glue, int k) {
         if (k == 0) {
             return null;
         }
@@ -1020,7 +1078,29 @@ public boolean isSolrIsDown() {
     public void setSolrIsDown(boolean solrIsDown) {
         this.solrIsDown = solrIsDown;
     }
+    
+    public boolean isSolrTemporarilyUnavailable() {
+        return solrIsTemporarilyUnavailable;
+    }
+    
+    public void setSolrTemporarilyUnavailable(boolean solrIsTemporarilyUnavailable) {
+        this.solrIsTemporarilyUnavailable = solrIsTemporarilyUnavailable; 
+    }
 
+    /**
+     * Indicates that the fragment should not be requesting facets in Solr 
+     * searches and rendering them on the page.
+     * @return true if disabled; false by default 
+     */
+    public boolean isFacetsDisabled() {
+        // The method is used in rendered="..." logic. So we are using 
+        // SettingsWrapper to make sure we are not looking it up repeatedly 
+        // (settings are not expensive to look up, but 
+        // still).
+        
+        return settingsWrapper.isTrueForKey(SettingsServiceBean.Key.DisableSolrFacets, false);
+    }
+    
     public boolean isRootDv() {
         return rootDv;
     }
@@ -1300,7 +1380,7 @@ public void setDisplayCardValues() {
                 result.setImageUrl(thumbnailServiceWrapper.getDataverseCardImageAsBase64Url(result));
             } else if (result.getType().equals("datasets")) {
                 if (result.getEntity() != null) {
-                    result.setImageUrl(thumbnailServiceWrapper.getDatasetCardImageAsBase64Url(result));
+                    result.setImageUrl(thumbnailServiceWrapper.getDatasetCardImageAsUrl(result));
                 }
                 
                 if (result.isHarvested()) {
@@ -1395,6 +1475,10 @@ public boolean isActivelyEmbargoed(SolrSearchResult result) {
         }
     }
     
+    public boolean isValid(SolrSearchResult result) {
+        return result.isValid();
+    }
+    
     public enum SortOrder {
 
         asc, desc
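To make the second-pass logic above concrete, here is a small worked sketch (hypothetical values) of how the filter for the unselected types is assembled, using the combine(String[], String, int) overload added in this file:

    // Suppose only "datasets" is checked on the collection page:
    List<String> selectedTypesList = List.of("datasets");

    String[] arr = new String[3];
    int c = 0;
    for (String dvObjectType : List.of("dataverses", "datasets", "files")) {
        if (!selectedTypesList.contains(dvObjectType)) {
            arr[c++] = dvObjectType;   // ends up as {"dataverses", "files", null}, c == 2
        }
    }
    // combine(arr, " OR ", 2) joins just the first two entries -> "dataverses OR files",
    // so the second pass adds the filter query
    //     SearchFields.TYPE + ":(dataverses OR files)"
    // and its type facet counts fill previewCountbyType for the unselected types,
    // while the first pass already supplied the count for "datasets".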
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java
index e96164d442d..0dd2153f75b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchPermissionsServiceBean.java
@@ -22,9 +22,9 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 
 /**
  * Determine whether items should be searchable.
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
index b87a334e938..51bf3bee30b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
@@ -20,7 +20,6 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.lang.reflect.Field;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Calendar;
@@ -36,16 +35,16 @@
 import java.util.MissingResourceException;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.EJBTransactionRolledbackException;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionRolledbackLocalException;
-import javax.inject.Named;
-import javax.persistence.NoResultException;
+import jakarta.ejb.EJB;
+import jakarta.ejb.EJBTransactionRolledbackException;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionRolledbackLocalException;
+import jakarta.inject.Named;
+import jakarta.persistence.NoResultException;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.SolrQuery.SortClause;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient.RemoteSolrException;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException;
 import org.apache.solr.client.solrj.response.FacetField;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RangeFacet;
@@ -102,7 +101,7 @@ public class SearchServiceBean {
     public SolrQueryResponse search(DataverseRequest dataverseRequest, List<Dataverse> dataverses, String query, List<String> filterQueries, String sortField, String sortOrder, int paginationStart, boolean onlyDatatRelatedToMe, int numResultsPerPage) throws SearchException {
         return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDatatRelatedToMe, numResultsPerPage, true, null, null);
     }
-
+    
     /**
      * Import note: "onlyDatatRelatedToMe" relies on filterQueries for providing
      * access to Private Data for the correct user
@@ -123,6 +122,41 @@ public SolrQueryResponse search(DataverseRequest dataverseRequest, List<Datavers
      * @param retrieveEntities - look up dvobject entities with .find() (potentially expensive!)
      * @param geoPoint e.g. "35,15"
      * @param geoRadius e.g. "5"
+     *
+     * @return
+     * @throws SearchException
+     */
+    public SolrQueryResponse search(
+            DataverseRequest dataverseRequest, 
+            List<Dataverse> dataverses, 
+            String query, 
+            List<String> filterQueries, 
+            String sortField, 
+            String sortOrder, 
+            int paginationStart, 
+            boolean onlyDatatRelatedToMe, 
+            int numResultsPerPage,
+            boolean retrieveEntities,
+            String geoPoint,
+            String geoRadius) throws SearchException {
+        return search(dataverseRequest, dataverses, query, filterQueries, sortField, sortOrder, paginationStart, onlyDatatRelatedToMe, numResultsPerPage, retrieveEntities, geoPoint, geoRadius, true, true);
+    }
+
+    /**
+     * @param dataverseRequest
+     * @param dataverses
+     * @param query
+     * @param filterQueries
+     * @param sortField
+     * @param sortOrder
+     * @param paginationStart
+     * @param onlyDatatRelatedToMe
+     * @param numResultsPerPage
+     * @param retrieveEntities - look up dvobject entities with .find() (potentially expensive!)
+     * @param geoPoint e.g. "35,15"
+     * @param geoRadius e.g. "5"
+     * @param addFacets boolean
+     * @param addHighlights boolean
      * @return
      * @throws SearchException
      */
@@ -131,13 +165,16 @@ public SolrQueryResponse search(
             List<Dataverse> dataverses,
             String query,
             List<String> filterQueries,
-            String sortField, String sortOrder,
+            String sortField, 
+            String sortOrder,
             int paginationStart,
             boolean onlyDatatRelatedToMe,
             int numResultsPerPage,
             boolean retrieveEntities,
             String geoPoint,
-            String geoRadius
+            String geoRadius,
+            boolean addFacets,
+            boolean addHighlights
     ) throws SearchException {
 
         if (paginationStart < 0) {
@@ -153,68 +190,27 @@ public SolrQueryResponse search(
 //        SortClause foo = new SortClause("name", SolrQuery.ORDER.desc);
 //        if (query.equals("*") || query.equals("*:*")) {
 //            solrQuery.setSort(new SortClause(SearchFields.NAME_SORT, SolrQuery.ORDER.asc));
-        solrQuery.setSort(new SortClause(sortField, sortOrder));
+        if (sortField != null) {
+            // is it ok not to specify any sort? - there are cases where we 
+            // don't care, and it must cost some extra cycles -- L.A.
+            solrQuery.setSort(new SortClause(sortField, sortOrder));
+        }
 //        } else {
 //            solrQuery.setSort(sortClause);
 //        }
 //        solrQuery.setSort(sortClause);
-        solrQuery.setHighlight(true).setHighlightSnippets(1);
-        Integer fragSize = systemConfig.getSearchHighlightFragmentSize();
-        if (fragSize != null) {
-            solrQuery.setHighlightFragsize(fragSize);
-        }
-        solrQuery.setHighlightSimplePre("<span class=\"search-term-match\">");
-        solrQuery.setHighlightSimplePost("</span>");
-        Map<String, String> solrFieldsToHightlightOnMap = new HashMap<>();
-        // TODO: Do not hard code "Name" etc as English here.
-        solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name");
-        solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation");
-        solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type");
-        solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description");
-        solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name");
-        solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label");
-        solrFieldsToHightlightOnMap.put(SearchFields.LITERAL_QUESTION, BundleUtil.getStringFromBundle("search.datasets.literalquestion"));
-        solrFieldsToHightlightOnMap.put(SearchFields.INTERVIEW_INSTRUCTIONS, BundleUtil.getStringFromBundle("search.datasets.interviewinstructions"));
-        solrFieldsToHightlightOnMap.put(SearchFields.POST_QUESTION, BundleUtil.getStringFromBundle("search.datasets.postquestion"));
-        solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_UNIVERSE, BundleUtil.getStringFromBundle("search.datasets.variableuniverse"));
-        solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NOTES, BundleUtil.getStringFromBundle("search.datasets.variableNotes"));
-
-        solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type");
-        solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Year");
-        solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.datasets.persistentId"));
-        solrFieldsToHightlightOnMap.put(SearchFields.FILE_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.files.persistentId"));
-        /**
-         * @todo Dataverse subject and affiliation should be highlighted but
-         * this is commented out right now because the "friendly" names are not
-         * being shown on the dataverse cards. See also
-         * https://github.com/IQSS/dataverse/issues/1431
-         */
-//        solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_SUBJECT, "Subject");
-//        solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_AFFILIATION, "Affiliation");
-        /**
-         * @todo: show highlight on file card?
-         * https://redmine.hmdc.harvard.edu/issues/3848
-         */
-        solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension");
-        solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag");
-        List<DatasetFieldType> datasetFields = datasetFieldService.findAllOrderedById();
-        for (DatasetFieldType datasetFieldType : datasetFields) {
-            String solrField = datasetFieldType.getSolrField().getNameSearchable();
-            String displayName = datasetFieldType.getDisplayName();
-            solrFieldsToHightlightOnMap.put(solrField, displayName);
-        }
-        for (Map.Entry<String, String> entry : solrFieldsToHightlightOnMap.entrySet()) {
-            String solrField = entry.getKey();
-            // String displayName = entry.getValue();
-            solrQuery.addHighlightField(solrField);
-        }
+
+        
         solrQuery.setParam("fl", "*,score");
         solrQuery.setParam("qt", "/select");
         solrQuery.setParam("facet", "true");
+        
         /**
          * @todo: do we need facet.query?
          */
         solrQuery.setParam("facet.query", "*");
+        solrQuery.addFacetField(SearchFields.TYPE); // this one is always performed
+
         for (String filterQuery : filterQueries) {
             solrQuery.addFilterQuery(filterQuery);
         }
@@ -224,70 +220,127 @@ public SolrQueryResponse search(
             // See https://solr.apache.org/guide/8_11/spatial-search.html#bbox
             solrQuery.addFilterQuery("{!bbox sfield=" + SearchFields.GEOLOCATION + "}");
         }
+        
+        List<DataverseMetadataBlockFacet> metadataBlockFacets = new LinkedList<>();
 
-        // -----------------------------------
-        // Facets to Retrieve
-        // -----------------------------------
-        solrQuery.addFacetField(SearchFields.METADATA_TYPES);
-//        solrQuery.addFacetField(SearchFields.HOST_DATAVERSE);
-//        solrQuery.addFacetField(SearchFields.AUTHOR_STRING);
-        solrQuery.addFacetField(SearchFields.DATAVERSE_CATEGORY);
-        solrQuery.addFacetField(SearchFields.METADATA_SOURCE);
-//        solrQuery.addFacetField(SearchFields.AFFILIATION);
-        solrQuery.addFacetField(SearchFields.PUBLICATION_YEAR);
-//        solrQuery.addFacetField(SearchFields.CATEGORY);
-//        solrQuery.addFacetField(SearchFields.FILE_TYPE_MIME);
-//        solrQuery.addFacetField(SearchFields.DISTRIBUTOR);
-//        solrQuery.addFacetField(SearchFields.KEYWORD);
-        /**
-         * @todo when a new method on datasetFieldService is available
-         * (retrieveFacetsByDataverse?) only show the facets that the dataverse
-         * in question wants to show (and in the right order):
-         * https://redmine.hmdc.harvard.edu/issues/3490
-         *
-         * also, findAll only returns advancedSearchField = true... we should
-         * probably introduce the "isFacetable" boolean rather than caring about
-         * if advancedSearchField is true or false
-         *
-         */
+        if (addFacets) {
+            // -----------------------------------
+            // Facets to Retrieve
+            // -----------------------------------
+            solrQuery.addFacetField(SearchFields.METADATA_TYPES);
+            solrQuery.addFacetField(SearchFields.DATAVERSE_CATEGORY);
+            solrQuery.addFacetField(SearchFields.METADATA_SOURCE);
+            solrQuery.addFacetField(SearchFields.PUBLICATION_YEAR);
+            /**
+             * @todo when a new method on datasetFieldService is available
+             * (retrieveFacetsByDataverse?) only show the facets that the
+             * dataverse in question wants to show (and in the right order):
+             * https://redmine.hmdc.harvard.edu/issues/3490
+             *
+             * also, findAll only returns advancedSearchField = true... we
+             * should probably introduce the "isFacetable" boolean rather than
+             * caring about if advancedSearchField is true or false
+             *
+             */
+
+            if (dataverses != null) {
+                for (Dataverse dataverse : dataverses) {
+                    if (dataverse != null) {
+                        for (DataverseFacet dataverseFacet : dataverse.getDataverseFacets()) {
+                            DatasetFieldType datasetField = dataverseFacet.getDatasetFieldType();
+                            solrQuery.addFacetField(datasetField.getSolrField().getNameFacetable());
+                        }
+                        // Get all metadata block facets configured to be displayed
+                        metadataBlockFacets.addAll(dataverse.getMetadataBlockFacets());
+                    }
+                }
+            }
+            
+            solrQuery.addFacetField(SearchFields.FILE_TYPE);
+            /**
+             * @todo: hide the extra line this shows in the GUI... at least it's
+             * last...
+             */
+            solrQuery.addFacetField(SearchFields.FILE_TAG);
+            if (!systemConfig.isPublicInstall()) {
+                solrQuery.addFacetField(SearchFields.ACCESS);
+            }
+        }
+
+        List<DatasetFieldType> datasetFields = datasetFieldService.findAllOrderedById();
+        Map<String, String> solrFieldsToHightlightOnMap = new HashMap<>();
+        if (addHighlights) {
+            solrQuery.setHighlight(true).setHighlightSnippets(1);
+            Integer fragSize = systemConfig.getSearchHighlightFragmentSize();
+            if (fragSize != null) {
+                solrQuery.setHighlightFragsize(fragSize);
+            }
+            solrQuery.setHighlightSimplePre("<span class=\"search-term-match\">");
+            solrQuery.setHighlightSimplePost("</span>");
+
+            // TODO: Do not hard code "Name" etc as English here.
+            solrFieldsToHightlightOnMap.put(SearchFields.NAME, "Name");
+            solrFieldsToHightlightOnMap.put(SearchFields.AFFILIATION, "Affiliation");
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_FRIENDLY, "File Type");
+            solrFieldsToHightlightOnMap.put(SearchFields.DESCRIPTION, "Description");
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NAME, "Variable Name");
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_LABEL, "Variable Label");
+            solrFieldsToHightlightOnMap.put(SearchFields.LITERAL_QUESTION, BundleUtil.getStringFromBundle("search.datasets.literalquestion"));
+            solrFieldsToHightlightOnMap.put(SearchFields.INTERVIEW_INSTRUCTIONS, BundleUtil.getStringFromBundle("search.datasets.interviewinstructions"));
+            solrFieldsToHightlightOnMap.put(SearchFields.POST_QUESTION, BundleUtil.getStringFromBundle("search.datasets.postquestion"));
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_UNIVERSE, BundleUtil.getStringFromBundle("search.datasets.variableuniverse"));
+            solrFieldsToHightlightOnMap.put(SearchFields.VARIABLE_NOTES, BundleUtil.getStringFromBundle("search.datasets.variableNotes"));
+
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_TYPE_SEARCHABLE, "File Type");
+            solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PUBLICATION_DATE, "Publication Year");
+            solrFieldsToHightlightOnMap.put(SearchFields.DATASET_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.datasets.persistentId"));
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_PERSISTENT_ID, BundleUtil.getStringFromBundle("advanced.search.files.persistentId"));
+            /**
+             * @todo Dataverse subject and affiliation should be highlighted but
+             * this is commented out right now because the "friendly" names are
+             * not being shown on the dataverse cards. See also
+             * https://github.com/IQSS/dataverse/issues/1431
+             */
+//        solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_SUBJECT, "Subject");
+//        solrFieldsToHightlightOnMap.put(SearchFields.DATAVERSE_AFFILIATION, "Affiliation");
+            /**
+             * @todo: show highlight on file card?
+             * https://redmine.hmdc.harvard.edu/issues/3848
+             */
+            solrFieldsToHightlightOnMap.put(SearchFields.FILENAME_WITHOUT_EXTENSION, "Filename Without Extension");
+            solrFieldsToHightlightOnMap.put(SearchFields.FILE_TAG_SEARCHABLE, "File Tag");
+
+            for (DatasetFieldType datasetFieldType : datasetFields) {
+                String solrField = datasetFieldType.getSolrField().getNameSearchable();
+                String displayName = datasetFieldType.getDisplayName();
+                solrFieldsToHightlightOnMap.put(solrField, displayName);
+            }
+            for (Map.Entry<String, String> entry : solrFieldsToHightlightOnMap.entrySet()) {
+                String solrField = entry.getKey();
+                // String displayName = entry.getValue();
+                solrQuery.addHighlightField(solrField);
+            }
+        }
 
-        List<DataverseMetadataBlockFacet> metadataBlockFacets = new LinkedList<>();
         //I'm not sure if just adding null here is good for hte permissions system... i think it needs something
         if(dataverses != null) {
             for(Dataverse dataverse : dataverses) {
                 // -----------------------------------
                 // PERMISSION FILTER QUERY
                 // -----------------------------------
-                String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, dataverse, onlyDatatRelatedToMe);
+                String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, dataverse, onlyDatatRelatedToMe, addFacets);
                 if (permissionFilterQuery != null) {
                     solrQuery.addFilterQuery(permissionFilterQuery);
                 }
-                if (dataverse != null) {
-                    for (DataverseFacet dataverseFacet : dataverse.getDataverseFacets()) {
-                        DatasetFieldType datasetField = dataverseFacet.getDatasetFieldType();
-                        solrQuery.addFacetField(datasetField.getSolrField().getNameFacetable());
-                    }
-                    // Get all metadata block facets configured to be displayed
-                    metadataBlockFacets.addAll(dataverse.getMetadataBlockFacets());
-                }
             }
         } else {
-            String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, null, onlyDatatRelatedToMe);
+            String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, null, onlyDatatRelatedToMe, addFacets);
             if (permissionFilterQuery != null) {
                 solrQuery.addFilterQuery(permissionFilterQuery);
             }
         }
 
-        solrQuery.addFacetField(SearchFields.FILE_TYPE);
-        /**
-         * @todo: hide the extra line this shows in the GUI... at least it's
-         * last...
-         */
-        solrQuery.addFacetField(SearchFields.TYPE);
-        solrQuery.addFacetField(SearchFields.FILE_TAG);
-        if (!systemConfig.isPublicInstall()) {
-            solrQuery.addFacetField(SearchFields.ACCESS);
-        }
+        
         /**
          * @todo: do sanity checking... throw error if negative
          */
@@ -329,10 +382,32 @@ public SolrQueryResponse search(
         // Make the solr query
         // -----------------------------------
         QueryResponse queryResponse = null;
+        
         try {
             queryResponse = solrClientService.getSolrClient().query(solrQuery);
+
         } catch (RemoteSolrException ex) {
             String messageFromSolr = ex.getLocalizedMessage();
+            
+            logger.fine("message from the solr exception: "+messageFromSolr+"; code: "+ex.code());
+            
+            SolrQueryResponse exceptionSolrQueryResponse = new SolrQueryResponse(solrQuery);
+
+            // We probably shouldn't be assuming that this is necessarily a 
+            // "search syntax error", as the code below implies - could be 
+            // something else too - ? 
+            
+            // Specifically, we now rely on the Solr "circuit breaker" mechanism
+            // to start dropping requests with 503, when the service is 
+            // overwhelmed with requests load (with the assumption that this is
+            // a transient condition): 
+            
+            if (ex.code() == 503) {
+                // actual logic for communicating this state back to the local 
+                // client code TBD (@todo)
+                exceptionSolrQueryResponse.setSolrTemporarilyUnavailable(true);
+            }
+            
             String error = "Search Syntax Error: ";
             String stringToHide = "org.apache.solr.search.SyntaxError: ";
             if (messageFromSolr.startsWith(stringToHide)) {
@@ -342,10 +417,10 @@ public SolrQueryResponse search(
                 error += messageFromSolr;
             }
             logger.info(error);
-            SolrQueryResponse exceptionSolrQueryResponse = new SolrQueryResponse(solrQuery);
             exceptionSolrQueryResponse.setError(error);
 
             // we can't show anything because of the search syntax error
+                        
             long zeroNumResultsFound = 0;
             long zeroGetResultsStart = 0;
             List<SolrSearchResult> emptySolrSearchResults = new ArrayList<>();
@@ -361,6 +436,12 @@ public SolrQueryResponse search(
         } catch (SolrServerException | IOException ex) {
             throw new SearchException("Internal Dataverse Search Engine Error", ex);
         }
+        
+        int statusCode = queryResponse.getStatus();
+        
+        logger.fine("status code of the query response: "+statusCode);
+        logger.fine("_size from query response: "+queryResponse._size());
+        logger.fine("qtime: "+queryResponse.getQTime());
 
         SolrDocumentList docs = queryResponse.getResults();
         List<SolrSearchResult> solrSearchResults = new ArrayList<>();
@@ -414,36 +495,47 @@ public SolrQueryResponse search(
             String identifierOfDataverse = (String) solrDocument.getFieldValue(SearchFields.IDENTIFIER_OF_DATAVERSE);
             String nameOfDataverse = (String) solrDocument.getFieldValue(SearchFields.DATAVERSE_NAME);
             Long embargoEndDate = (Long) solrDocument.getFieldValue(SearchFields.EMBARGO_END_DATE);
+            Boolean datasetValid = (Boolean) solrDocument.getFieldValue(SearchFields.DATASET_VALID);
             
             List<String> matchedFields = new ArrayList<>();
-            List<Highlight> highlights = new ArrayList<>();
-            Map<SolrField, Highlight> highlightsMap = new HashMap<>();
-            Map<SolrField, List<String>> highlightsMap2 = new HashMap<>();
-            Map<String, Highlight> highlightsMap3 = new HashMap<>();
-            if (queryResponse.getHighlighting().get(id) != null) {
-                for (Map.Entry<String, String> entry : solrFieldsToHightlightOnMap.entrySet()) {
-                    String field = entry.getKey();
-                    String displayName = entry.getValue();
-
-                    List<String> highlightSnippets = queryResponse.getHighlighting().get(id).get(field);
-                    if (highlightSnippets != null) {
-                        matchedFields.add(field);
-                        /**
-                         * @todo only SolrField.SolrType.STRING? that's not
-                         * right... knit the SolrField object more into the
-                         * highlighting stuff
-                         */
-                        SolrField solrField = new SolrField(field, SolrField.SolrType.STRING, true, true);
-                        Highlight highlight = new Highlight(solrField, highlightSnippets, displayName);
-                        highlights.add(highlight);
-                        highlightsMap.put(solrField, highlight);
-                        highlightsMap2.put(solrField, highlightSnippets);
-                        highlightsMap3.put(field, highlight);
+            
+            SolrSearchResult solrSearchResult = new SolrSearchResult(query, name);
+            
+            if (addHighlights) {
+                List<Highlight> highlights = new ArrayList<>();
+                Map<SolrField, Highlight> highlightsMap = new HashMap<>();
+                Map<SolrField, List<String>> highlightsMap2 = new HashMap<>();
+                Map<String, Highlight> highlightsMap3 = new HashMap<>();
+                if (queryResponse.getHighlighting().get(id) != null) {
+                    for (Map.Entry<String, String> entry : solrFieldsToHightlightOnMap.entrySet()) {
+                        String field = entry.getKey();
+                        String displayName = entry.getValue();
+
+                        List<String> highlightSnippets = queryResponse.getHighlighting().get(id).get(field);
+                        if (highlightSnippets != null) {
+                            matchedFields.add(field);
+                            /**
+                             * @todo only SolrField.SolrType.STRING? that's not
+                             * right... knit the SolrField object more into the
+                             * highlighting stuff
+                             */
+                            SolrField solrField = new SolrField(field, SolrField.SolrType.STRING, true, true);
+                            Highlight highlight = new Highlight(solrField, highlightSnippets, displayName);
+                            highlights.add(highlight);
+                            highlightsMap.put(solrField, highlight);
+                            highlightsMap2.put(solrField, highlightSnippets);
+                            highlightsMap3.put(field, highlight);
+                        }
                     }
+
                 }
 
+                solrSearchResult.setHighlightsAsList(highlights);
+                solrSearchResult.setHighlightsMap(highlightsMap);
+                solrSearchResult.setHighlightsAsMap(highlightsMap3);
             }
-            SolrSearchResult solrSearchResult = new SolrSearchResult(query, name);
+            
+            
             /**
              * @todo put all this in the constructor?
              */
@@ -470,14 +562,13 @@ public SolrQueryResponse search(
             solrSearchResult.setNameSort(nameSort);
             solrSearchResult.setReleaseOrCreateDate(release_or_create_date);
             solrSearchResult.setMatchedFields(matchedFields);
-            solrSearchResult.setHighlightsAsList(highlights);
-            solrSearchResult.setHighlightsMap(highlightsMap);
-            solrSearchResult.setHighlightsAsMap(highlightsMap3);
+            
             Map<String, String> parent = new HashMap<>();
             String description = (String) solrDocument.getFieldValue(SearchFields.DESCRIPTION);
             solrSearchResult.setDescriptionNoSnippet(description);
             solrSearchResult.setDeaccessionReason(deaccessionReason);
             solrSearchResult.setDvTree(dvTree);
+            solrSearchResult.setDatasetValid(datasetValid);
 
             String originSource = (String) solrDocument.getFieldValue(SearchFields.METADATA_SOURCE);
             if (IndexServiceBean.HARVESTED.equals(originSource)) {
@@ -862,7 +953,7 @@ public String getCapitalizedName(String name) {
      *
      * @return
      */
-    private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQuery solrQuery, Dataverse dataverse, boolean onlyDatatRelatedToMe) {
+    private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQuery solrQuery, Dataverse dataverse, boolean onlyDatatRelatedToMe, boolean addFacets) {
 
         User user = dataverseRequest.getUser();
         if (user == null) {
@@ -921,9 +1012,11 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ
 
         AuthenticatedUser au = (AuthenticatedUser) user;
 
-        // Logged in user, has publication status facet
-        //
-        solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS);
+        if (addFacets) {
+            // Logged in user, has publication status facet
+            //
+            solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS);
+        }
 
         // ----------------------------------------------------
         // (3) Is this a Super User?
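For callers, the two new flags make a deliberately cheap query possible. A sketch of the kind of call the collection page now issues for its second pass (argument values are illustrative); the type facet still comes back because SearchFields.TYPE is always added as a facet field:

    SolrQueryResponse counts = searchService.search(
            dataverseRequest,
            dataverses,
            queryToPassToSolr,
            filterQueriesFinalSecondPass,
            null,      // sortField: skip sorting entirely
            "asc",     // sortOrder: unused when sortField is null
            0,         // paginationStart
            false,     // onlyDataRelatedToMe
            1,         // numResultsPerPage: only the counts are needed
            false,     // retrieveEntities
            null,      // geoPoint
            null,      // geoRadius
            false,     // addFacets: skip the collection-specific facets
            false);    // addHighlights: skip highlighting setup and parsing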
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java
index 0dc2fe08b54..b36130de7c8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java
@@ -10,11 +10,11 @@
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.PreDestroy;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.inject.Named;
 import java.io.IOException;
 import java.util.logging.Logger;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
index 5856004ce53..04021eb75b6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
@@ -20,11 +20,11 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.response.UpdateResponse;
 import org.apache.solr.common.SolrInputDocument;
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java
index b499b80961e..27e79cb1fc2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrQueryResponse.java
@@ -5,8 +5,8 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.response.FacetField;
 
@@ -26,6 +26,7 @@ public class SolrQueryResponse {
     private String error;
     private Map<String, Long> dvObjectCounts = new HashMap<>();
     private Map<String, Long> publicationStatusCounts = new HashMap<>();
+    private boolean solrTemporarilyUnavailable = false;
 
     public static String DATAVERSES_COUNT_KEY = "dataverses_count";
     public static String DATASETS_COUNT_KEY = "datasets_count";
@@ -91,7 +92,14 @@ public JsonObjectBuilder getPublicationStatusCountsAsJSON(){
         }
         return this.getMapCountsAsJSON(publicationStatusCounts);
     }
-       
+    
+    public boolean isSolrTemporarilyUnavailable() {
+        return solrTemporarilyUnavailable;
+    }
+    
+    public void setSolrTemporarilyUnavailable(boolean solrTemporarilyUnavailable) {
+        this.solrTemporarilyUnavailable = solrTemporarilyUnavailable;
+    }
     
     public JsonObjectBuilder getDvObjectCountsAsJSON(){
         
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
index 59fcd1d8503..6ad7f9dbbf6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrSearchResult.java
@@ -9,10 +9,10 @@
 import java.util.Map;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
 import org.apache.commons.collections4.CollectionUtils;
 
@@ -22,6 +22,7 @@
 import edu.harvard.iq.dataverse.DatasetRelPublication;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.api.Util;
 import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
@@ -122,6 +123,8 @@ public class SolrSearchResult {
 
 	private Long embargoEndDate;
 
+	private boolean datasetValid;
+
 	public String getDvTree() {
 		return dvTree;
 	}
@@ -422,6 +425,7 @@ public JsonObjectBuilder getJsonForMyData() {
 				.add("is_draft_state", this.isDraftState()).add("is_in_review_state", this.isInReviewState())
 				.add("is_unpublished_state", this.isUnpublishedState()).add("is_published", this.isPublishedState())
 				.add("is_deaccesioned", this.isDeaccessionedState())
+				.add("is_valid", this.isValid())
 				.add("date_to_display_on_card", getDateToDisplayOnCard());
 
 		// Add is_deaccessioned attribute, even though MyData currently screens any deaccessioned info out
@@ -1129,9 +1133,10 @@ public String getFileUrl() {
 		 * if (entity != null && entity instanceof DataFile && this.isHarvested()) { String remoteArchiveUrl = ((DataFile) entity).getRemoteArchiveURL(); if
 		 * (remoteArchiveUrl != null) { return remoteArchiveUrl; } return null; }
 		 */
-		if (entity.getIdentifier() != null) {
-			return "/file.xhtml?persistentId=" + entity.getGlobalIdString();
-		}
+        if (entity.getIdentifier() != null) {
+            GlobalId entityPid = entity.getGlobalId();
+            return "/file.xhtml?persistentId=" + ((entityPid != null) ? entityPid.asString() : null);
+        }
 
 		return "/file.xhtml?fileId=" + entity.getId() + "&datasetVersionId=" + datasetVersionId;
 
@@ -1255,4 +1260,12 @@ public Long getEmbargoEndDate() {
 	public void setEmbargoEndDate(Long embargoEndDate) {
 		this.embargoEndDate = embargoEndDate;
 	}
+
+	public void setDatasetValid(Boolean datasetValid) {
+		this.datasetValid = datasetValid == null || Boolean.valueOf(datasetValid);
+	}
+
+	public boolean isValid() {
+		return datasetValid;
+    }
 }
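The null handling in setDatasetValid above is intentional: documents indexed before this change carry no datasetValid field and must keep counting as valid. A short sketch of the resulting behavior:

    SolrSearchResult result = new SolrSearchResult("*", "My Dataset");

    result.setDatasetValid(null);           // field absent in an older Solr document
    // result.isValid() -> true

    result.setDatasetValid(Boolean.FALSE);  // dataset indexed as failing validation
    // result.isValid() -> false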
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java
index 66bb63ed596..ff4a2e4aa96 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearch.java
@@ -5,16 +5,16 @@
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToMany;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.Table;
 
 @Entity
 @Table(indexes = {@Index(columnList="definitionpoint_id")
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java
index f884a9529c9..7f51f9c8728 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchFilterQuery.java
@@ -1,16 +1,16 @@
 package edu.harvard.iq.dataverse.search.savedsearch;
 
 import java.io.Serializable;
-import javax.persistence.CascadeType;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.ManyToOne;
-import javax.persistence.Table;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.Table;
 
 @Entity
 @Table(indexes = {@Index(columnList="savedsearch_id")})
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
index 587e054dc4a..7fc2bdf79a3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/savedsearch/SavedSearchServiceBean.java
@@ -21,24 +21,23 @@
 import edu.harvard.iq.dataverse.search.SortBy;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.EJB;
-import javax.ejb.Schedule;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
-import javax.persistence.EntityManager;
-import javax.persistence.NoResultException;
-import javax.persistence.NonUniqueResultException;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Schedule;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
+import jakarta.servlet.http.HttpServletRequest;
 
 @Stateless
 @Named
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java
new file mode 100644
index 00000000000..a2c3f53d59d
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java
@@ -0,0 +1,81 @@
+package edu.harvard.iq.dataverse.settings;
+
+import edu.harvard.iq.dataverse.util.FileUtil;
+
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.DependsOn;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
+import java.io.IOException;
+import java.nio.file.FileSystemException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+@Startup
+@Singleton
+@DependsOn("StartupFlywayMigrator")
+public class ConfigCheckService {
+    
+    private static final Logger logger = Logger.getLogger(ConfigCheckService.class.getCanonicalName());
+
+    public static class ConfigurationError extends RuntimeException {
+        public ConfigurationError(String message) {
+            super(message);
+        }
+    }
+    
+    @PostConstruct
+    public void startup() {
+        if (!checkSystemDirectories()) {
+            throw new ConfigurationError("Not all configuration checks passed successfully. See logs above.");
+        }
+    }
+    
+    /**
+     * In this method, we check the existence and writability of all important directories we use during
+     * normal operations. It does not include checks for the storage system. If a directory does not exist,
+     * we try to create it (and fail when not allowed to).
+     *
+     * @return True if all checks were successful, false otherwise.
+     */
+    public boolean checkSystemDirectories() {
+        Map<Path, String> paths = Map.of(
+                Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")",
+                Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")",
+                Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup()), "docroot space (see " + JvmSettings.DOCROOT_DIRECTORY.getScopedKey() + ")");
+        
+        boolean success = true;
+        for (Path path : paths.keySet()) {
+            // Check if the configured path is absolute - avoid potential problems with relative paths this way
+            if (! path.isAbsolute()) {
+                logger.log(Level.SEVERE, () -> "Configured directory " + path + " for " + paths.get(path) + " is not absolute");
+                success = false;
+                continue;
+            }
+            
+            if (! Files.exists(path)) {
+                try {
+                    Files.createDirectories(path);
+                } catch (IOException e) {
+                    String details;
+                    if (e instanceof FileSystemException) {
+                        details = ": " + e.getClass();
+                    } else {
+                        details = "";
+                    }
+                    
+                    logger.log(Level.SEVERE, () -> "Could not create directory " + path + " for " + paths.get(path) + details);
+                    success = false;
+                }
+            } else if (!Files.isWritable(path)) {
+                logger.log(Level.SEVERE, () -> "Directory " + path + " for " + paths.get(path) + " exists, but is not writeable");
+                success = false;
+            }
+        }
+        return success;
+    }
+
+}
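// Illustrative sketch, not part of the patch above: the essence of the per-directory check that
// ConfigCheckService#checkSystemDirectories() performs for each configured location - an absolute
// path is required, a missing directory is created, and the result must be writable. The class and
// method names below are hypothetical.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

class DirectoryCheckSketch {
    static boolean checkOne(Path dir) {
        if (!dir.isAbsolute()) {
            return false;                 // relative paths are rejected outright
        }
        try {
            Files.createDirectories(dir); // no-op if the directory already exists
        } catch (IOException e) {
            return false;                 // cannot create -> configuration error
        }
        return Files.isWritable(dir);     // must be writable for normal operation
    }
}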
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
new file mode 100644
index 00000000000..afa5a1c986a
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
@@ -0,0 +1,75 @@
+package edu.harvard.iq.dataverse.settings;
+
+import java.util.Objects;
+
+/**
+ * <p>
+ *     This enum holds so-called "feature flags" (also known as "feature gates"). It can be used throughout the
+ *     application to guard experimental functionality or opt-in feature previews.
+ * </p><p>
+ *     The current implementation reuses {@link JvmSettings} to interpret any
+ *     <a href="https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#_built_in_converters">boolean values</a>
+ *     (true == case-insensitive one of "true", "1", "YES", "Y", "ON") and hook into the usual settings system
+ *     (any MicroProfile Config Source available).
+ * </p><p>
+ *     If you add a new flag, please add it here, choose a sensible default status, add some Javadoc about the
+ *     flagged feature, and add a "@since" tag to make it easier to identify when the flag was introduced.
+ * </p><p>
+ *     When a feature is removed, drop its flag entry. When a feature matures, drop the flag, too!
+ *     Flags are not meant to be switches for configuration!
+ * </p>
+ *
+ * @see <a href="https://guides.dataverse.org/en/latest/installation/config.html#feature-flags">Configuration Guide</a>
+ * @see <a href="https://guides.dataverse.org/en/latest/developers/configuration.html#adding-a-feature-flag">Developer Guide</a>
+ */
+public enum FeatureFlags {
+
+    /**
+     * Enables API authentication via session cookie (JSESSIONID). Caution: enabling this feature flag exposes the installation to CSRF risks.
+     * @apiNote Raise flag by setting "dataverse.feature.api-session-auth"
+     * @since Dataverse 5.14
+     */
+    API_SESSION_AUTH("api-session-auth"),
+    /**
+     * Enables API authentication via Bearer Token.
+     * @apiNote Raise flag by setting "dataverse.feature.api-bearer-auth"
+     * @since Dataverse @TODO:
+     */
+    API_BEARER_AUTH("api-bearer-auth"),
+    ;
+    
+    final String flag;
+    final boolean defaultStatus;
+    
+    /**
+     * Construct a flag with default status "off".
+     *
+     * @param flag This flag name will be used to create a scoped String with {@link JvmSettings#FEATURE_FLAG},
+     *             making it available as "dataverse.feature.${flag}".
+     */
+    FeatureFlags(String flag) {
+        this(flag, false);
+    }
+    
+    /**
+     * Construct a flag.
+     * @param flag This flag name will be used to create a scoped String with {@link JvmSettings#FEATURE_FLAG},
+     *             making it available as "dataverse.feature.${flag}".
+     * @param defaultStatus A sensible default should be given here. Probably this will be "false" for most
+     *                      experimental feature previews.
+     */
+    FeatureFlags(String flag, boolean defaultStatus) {
+        Objects.requireNonNull(flag);
+        this.flag = flag;
+        this.defaultStatus = defaultStatus;
+    }
+    
+    /**
+     * Determine the status of this flag via {@link JvmSettings}.
+     * @return True or false, depending on the configuration or {@link #defaultStatus} if not found.
+     */
+    public boolean enabled() {
+        return JvmSettings.FEATURE_FLAG.lookupOptional(Boolean.class, flag).orElse(defaultStatus);
+    }
+
+}
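// Illustrative sketch, not part of the patch above: guarding an experimental code path behind a
// feature flag. The flag is raised through any MicroProfile Config source, e.g. the JVM option
// -Ddataverse.feature.api-session-auth=true. The surrounding class and method are hypothetical.
import edu.harvard.iq.dataverse.settings.FeatureFlags;

class SessionAuthGuardSketch {
    boolean allowSessionCookieAuth() {
        // enabled() resolves "dataverse.feature.api-session-auth" and falls back to the default (off)
        return FeatureFlags.API_SESSION_AUTH.enabled();
    }
}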
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
index ed3a161075b..3bc06738a7e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
@@ -47,6 +47,10 @@ public enum JvmSettings {
     // FILES SETTINGS
     SCOPE_FILES(PREFIX, "files"),
     FILES_DIRECTORY(SCOPE_FILES, "directory"),
+    UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"),
+    DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"),
+    GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"),
+    GLOBUS_CACHE_MAXAGE(SCOPE_FILES, "globus-cache-maxage"),
     
     // SOLR INDEX SETTINGS
     SCOPE_SOLR(PREFIX, "solr"),
@@ -67,9 +71,98 @@ public enum JvmSettings {
     // API SETTINGS
     SCOPE_API(PREFIX, "api"),
     API_SIGNING_SECRET(SCOPE_API, "signing-secret"),
+    API_ALLOW_INCOMPLETE_METADATA(SCOPE_API, "allow-incomplete-metadata"),
+
+    // SIGNPOSTING SETTINGS
+    SCOPE_SIGNPOSTING(PREFIX, "signposting"),
+    SIGNPOSTING_LEVEL1_AUTHOR_LIMIT(SCOPE_SIGNPOSTING, "level1-author-limit"),
+    SIGNPOSTING_LEVEL1_ITEM_LIMIT(SCOPE_SIGNPOSTING, "level1-item-limit"),
+
+    // FEATURE FLAGS SETTINGS
+    SCOPE_FLAGS(PREFIX, "feature"),
+    // This is a special placeholder-type setting entry, to be filled in by FeatureFlag entries during lookup.
+    // Avoids adding flag entries twice.
+    FEATURE_FLAG(SCOPE_FLAGS),
     
-    ;
+    // METADATA SETTINGS
+    SCOPE_METADATA(PREFIX, "metadata"),
+    MDB_SYSTEM_METADATA_KEYS(SCOPE_METADATA, "block-system-metadata-keys"),
+    MDB_SYSTEM_KEY_FOR(MDB_SYSTEM_METADATA_KEYS),
+
+    // PERSISTENT IDENTIFIER SETTINGS
+    SCOPE_PID(PREFIX, "pid"),
+    
+    // PROVIDER EZID (legacy) - these settings were formerly kept together with DataCite ones
+    SCOPE_PID_EZID(SCOPE_PID, "ezid"),
+    EZID_API_URL(SCOPE_PID_EZID, "api-url", "doi.baseurlstring"),
+    EZID_USERNAME(SCOPE_PID_EZID, "username", "doi.username"),
+    EZID_PASSWORD(SCOPE_PID_EZID, "password", "doi.password"),
+    
+    // PROVIDER DATACITE
+    SCOPE_PID_DATACITE(SCOPE_PID, "datacite"),
+    DATACITE_MDS_API_URL(SCOPE_PID_DATACITE, "mds-api-url", "doi.baseurlstring"),
+    DATACITE_REST_API_URL(SCOPE_PID_DATACITE, "rest-api-url", "doi.dataciterestapiurlstring", "doi.mdcbaseurlstring"),
+    DATACITE_USERNAME(SCOPE_PID_DATACITE, "username", "doi.username"),
+    DATACITE_PASSWORD(SCOPE_PID_DATACITE, "password", "doi.password"),
+    
+    // PROVIDER PERMALINK
+    SCOPE_PID_PERMALINK(SCOPE_PID, "permalink"),
+    PERMALINK_BASEURL(SCOPE_PID_PERMALINK, "base-url", "perma.baseurlstring"),
+    
+    // PROVIDER HANDLE
+    SCOPE_PID_HANDLENET(SCOPE_PID, "handlenet"),
+    HANDLENET_INDEX(SCOPE_PID_HANDLENET, "index", "dataverse.handlenet.index"),
+    SCOPE_PID_HANDLENET_KEY(SCOPE_PID_HANDLENET, "key"),
+    HANDLENET_KEY_PATH(SCOPE_PID_HANDLENET_KEY, "path", "dataverse.handlenet.admcredfile"),
+    HANDLENET_KEY_PASSPHRASE(SCOPE_PID_HANDLENET_KEY, "passphrase", "dataverse.handlenet.admprivphrase"),
+
+    // SPI SETTINGS
+    SCOPE_SPI(PREFIX, "spi"),
+    SCOPE_EXPORTERS(SCOPE_SPI, "exporters"),
+    EXPORTERS_DIRECTORY(SCOPE_EXPORTERS, "directory"),
+    
+    // MAIL SETTINGS
+    SCOPE_MAIL(PREFIX, "mail"),
+    SUPPORT_EMAIL(SCOPE_MAIL, "support-email"),
+    CC_SUPPORT_ON_CONTACT_EMAIL(SCOPE_MAIL, "cc-support-on-contact-email"),
     
+    // AUTH SETTINGS
+    SCOPE_AUTH(PREFIX, "auth"),
+    // AUTH: OIDC SETTINGS
+    SCOPE_OIDC(SCOPE_AUTH, "oidc"),
+    OIDC_ENABLED(SCOPE_OIDC, "enabled"),
+    OIDC_TITLE(SCOPE_OIDC, "title"),
+    OIDC_SUBTITLE(SCOPE_OIDC, "subtitle"),
+    OIDC_AUTH_SERVER_URL(SCOPE_OIDC, "auth-server-url"),
+    OIDC_CLIENT_ID(SCOPE_OIDC, "client-id"),
+    OIDC_CLIENT_SECRET(SCOPE_OIDC, "client-secret"),
+    SCOPE_OIDC_PKCE(SCOPE_OIDC, "pkce"),
+    OIDC_PKCE_ENABLED(SCOPE_OIDC_PKCE, "enabled"),
+    OIDC_PKCE_METHOD(SCOPE_OIDC_PKCE, "method"),
+    OIDC_PKCE_CACHE_MAXSIZE(SCOPE_OIDC_PKCE, "max-cache-size"),
+    OIDC_PKCE_CACHE_MAXAGE(SCOPE_OIDC_PKCE, "max-cache-age"),
+
+    // UI SETTINGS
+    SCOPE_UI(PREFIX, "ui"),
+    UI_ALLOW_REVIEW_INCOMPLETE(SCOPE_UI, "allow-review-for-incomplete"),
+    UI_SHOW_VALIDITY_FILTER(SCOPE_UI, "show-validity-filter"),
+
+    // NetCDF SETTINGS
+    SCOPE_NETCDF(PREFIX, "netcdf"),
+    GEO_EXTRACT_S3_DIRECT_UPLOAD(SCOPE_NETCDF, "geo-extract-s3-direct-upload"),
+
+    // BAGIT SETTINGS
+    SCOPE_BAGIT(PREFIX, "bagit"),
+    SCOPE_BAGIT_SOURCEORG(SCOPE_BAGIT, "sourceorg"),
+    BAGIT_SOURCE_ORG_NAME(SCOPE_BAGIT_SOURCEORG, "name"),
+    BAGIT_SOURCEORG_ADDRESS(SCOPE_BAGIT_SOURCEORG, "address"),
+    BAGIT_SOURCEORG_EMAIL(SCOPE_BAGIT_SOURCEORG, "email"),
+
+    // STORAGE USE SETTINGS
+    SCOPE_STORAGEUSE(PREFIX, "storageuse"),
+    STORAGEUSE_DISABLE_UPDATES(SCOPE_STORAGEUSE, "disable-storageuse-increments"),
+    ;
+
     private static final String SCOPE_SEPARATOR = ".";
     public static final String PLACEHOLDER_KEY = "%s";
     private static final Pattern OLD_NAME_PLACEHOLDER_PATTERN = Pattern.compile("%(\\d\\$)?s");
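// Illustrative sketch, not part of the patch above: how entries added to this enum are read
// elsewhere in the codebase. lookup() resolves the scoped key from any MicroProfile Config source
// and throws if no value is found; getScopedKey() returns the property name itself. The class and
// method names below are hypothetical.
import edu.harvard.iq.dataverse.settings.JvmSettings;

class JvmSettingsLookupSketch {
    String sitemapParentDirectory() {
        String docroot = JvmSettings.DOCROOT_DIRECTORY.lookup();     // value of "dataverse.files.docroot"
        String key = JvmSettings.DOCROOT_DIRECTORY.getScopedKey();   // the literal key "dataverse.files.docroot"
        System.out.println("docroot configured via " + key);
        return docroot;
    }
}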
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java
index 160ed693eee..b1910a2fbb5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/Setting.java
@@ -2,13 +2,13 @@
 
 import java.io.Serializable;
 import java.util.Objects;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
 
 /**
  * A single value in the config of dataverse.
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index d84e18d5931..627cef08d8b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -4,23 +4,20 @@
 import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
 import edu.harvard.iq.dataverse.api.ApiBlockingFilter;
 import edu.harvard.iq.dataverse.util.StringUtil;
-
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonValue;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
 
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
 
-import java.io.StringReader;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -406,9 +403,10 @@ Whether Harvesting (OAI) service is enabled
          */
         InheritParentRoleAssignments,
         
-        /** Make Data Count Logging and Display */
+        /** Make Data Count Logging, Display, and Start Date */
         MDCLogPath, 
         DisplayMDCMetrics,
+        MDCStartDate,
 
         /**
          * Allow CORS flag (true or false). It is true by default
@@ -463,18 +461,6 @@ Whether Harvesting (OAI) service is enabled
          */
         ExportInstallationAsDistributorOnlyWhenNotSet,
 
-        /**
-         * Basic Globus Token for Globus Application
-         */
-        GlobusBasicToken,
-        /**
-         * GlobusEndpoint is Globus endpoint for Globus application
-         */
-        GlobusEndpoint,
-        /** 
-         * Comma separated list of Globus enabled stores
-         */
-        GlobusStores,
         /** Globus App URL
          * 
          */
@@ -565,6 +551,7 @@ Whether Harvesting (OAI) service is enabled
          * LDN Inbox Allowed Hosts - a comma separated list of IP addresses allowed to submit messages to the inbox
          */
         LDNMessageHosts,
+
         /*
          * Allow a custom JavaScript to control values of specific fields.
          */
@@ -576,8 +563,42 @@ Whether Harvesting (OAI) service is enabled
         /**
          * The URL for the DvWebLoader tool (see github.com/gdcc/dvwebloader for details)
          */
-        WebloaderUrl
+        WebloaderUrl, 
+        /**
+         * Enforce storage quotas.
+         */
+        UseStorageQuotas, 
+        /** 
+         * Placeholder storage quota (defines the same quota setting for every user; used to test the concept of a quota).
+         */
+        StorageQuotaSizeInBytes,
 
+        /**
+         * A comma-separated list of CategoryName in the desired order for files to be
+         * sorted in the file table display. If not set, files will be sorted
+         * alphabetically by default. If set, files will be sorted by these categories
+         * and alphabetically within each category.
+         */
+        CategoryOrder,
+        /**
+         * True(default)/false option deciding whether ordering by folder should be applied to the 
+         * dataset listing of datafiles.
+         */
+        OrderByFolder,
+        /**
+         * True/false(default) option deciding whether the dataset file table display should include checkboxes
+         * allowing users to dynamically turn folder and category ordering on/off.
+         */
+        AllowUserManagementOfOrder,
+        /**
+         * True/false(default) option deciding whether file PIDs can be enabled per collection, using the Dataverse/collection set attribute API call.
+         */
+        AllowEnablingFilePIDsPerCollection,
+        /**
+         * Allows an instance admin to disable Solr search facets on the collection
+         * and dataset pages instantly
+         */
+        DisableSolrFacets
         ;
 
         @Override
@@ -665,8 +686,8 @@ public Long getValueForCompoundKeyAsLong(Key key, String param){
     	   try {
     		   return Long.parseLong(val);
     	   } catch (NumberFormatException ex) {
-    		   try ( StringReader rdr = new StringReader(val) ) {
-    			   JsonObject settings = Json.createReader(rdr).readObject();
+    		   try {
+    			   JsonObject settings = JsonUtil.getJsonObject(val);
     			   if(settings.containsKey(param)) {
     				   return Long.parseLong(settings.getString(param));
     			   } else if(settings.containsKey("default")) {
@@ -699,8 +720,8 @@ public Boolean getValueForCompoundKeyAsBoolean(Key key, String param) {
             return null;
         }
 
-        try (StringReader rdr = new StringReader(val)) {
-            JsonObject settings = Json.createReader(rdr).readObject();
+        try {
+            JsonObject settings = JsonUtil.getJsonObject(val);
             if (settings.containsKey(param)) {
                 return Boolean.parseBoolean(settings.getString(param));
             } else if (settings.containsKey("default")) {
@@ -866,8 +887,7 @@ public Map<String, String> getBaseMetadataLanguageMap(Map<String,String> languag
             if(mlString.isEmpty()) {
                 mlString="[]";
             }
-            JsonReader jsonReader = Json.createReader(new StringReader(mlString));
-            JsonArray languages = jsonReader.readArray();
+            JsonArray languages = JsonUtil.getJsonArray(mlString);
             for(JsonValue jv: languages) {
                 JsonObject lang = (JsonObject) jv;
                 languageMap.put(lang.getString("locale"), lang.getString("title"));
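// Illustrative sketch, not part of the patch above: the compound-key settings parsed by
// getValueForCompoundKeyAsLong()/getValueForCompoundKeyAsBoolean(). A value may be either a plain
// number ("10485760") or a JSON object with per-parameter entries and a "default" fallback, e.g.
// {"default":"10485760","s3":"1073741824"}. The injected bean and the MaxFileUploadSizeInBytes key
// below are assumptions for the example.
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;

class CompoundSettingSketch {
    Long uploadLimitForStore(SettingsServiceBean settingsService, String storeId) {
        // returns the per-store entry when present, otherwise the "default" entry (or null if unset)
        return settingsService.getValueForCompoundKeyAsLong(SettingsServiceBean.Key.MaxFileUploadSizeInBytes, storeId);
    }
}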
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/source/AliasConfigSource.java b/src/main/java/edu/harvard/iq/dataverse/settings/source/AliasConfigSource.java
index 407f39ce0f9..970da323de3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/source/AliasConfigSource.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/source/AliasConfigSource.java
@@ -108,9 +108,10 @@ public Set<String> getPropertyNames() {
     
     @Override
     public int getOrdinal() {
-        // Any other config source can override us.
-        // As soon as someone is starting to use the new property name, this alias becomes pointless.
-        return Integer.MIN_VALUE;
+        // Any other config source can override us, except the microprofile-config.properties source (ordinal = 100).
+        // We use *the same* ordinal (which is also the default ordinal). As our source is named "Alias",
+        // it will be consulted first, because "A" (ASCII 65) sorts before other letters (equal ordinals are ordered by source name via String::compareTo).
+        return ConfigSource.DEFAULT_ORDINAL;
     }
     
     @Override
diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java
index 14db98e540e..a51acd1f54f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapServiceBean.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
 import java.util.List;
-import javax.ejb.Asynchronous;
-import javax.ejb.Stateless;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.Stateless;
 
 @Stateless
 public class SiteMapServiceBean {
diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java
index e32b811ee2c..86ae697f771 100644
--- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java
@@ -3,6 +3,8 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObjectContainer;
+import edu.harvard.iq.dataverse.settings.ConfigCheckService;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.xml.XmlValidator;
 import java.io.File;
@@ -210,16 +212,17 @@ public static boolean stageFileExists() {
         }
         return false;
     }
-
+    
+    /**
+     * Look up the location where the sitemap is generated.
+     *
+     * Note: {@link ConfigCheckService#checkSystemDirectories()} verifies that this location is
+     * configured, exists, and is writable.
+     *
+     * @return Sitemap storage location ([docroot]/sitemap)
+     */
     private static String getSitemapPathString() {
-        String sitemapPathString = "/tmp";
-        // i.e. /usr/local/glassfish4/glassfish/domains/domain1
-        String domainRoot = System.getProperty("com.sun.aas.instanceRoot");
-        if (domainRoot != null) {
-            // Note that we write to a directory called "sitemap" but we serve just "/sitemap.xml" using PrettyFaces.
-            sitemapPathString = domainRoot + File.separator + "docroot" + File.separator + "sitemap";
-        }
-        return sitemapPathString;
+        return JvmSettings.DOCROOT_DIRECTORY.lookup() + File.separator + "sitemap";
 
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java
new file mode 100644
index 00000000000..d00f7041e61
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageQuota.java
@@ -0,0 +1,116 @@
+package edu.harvard.iq.dataverse.storageuse;
+
+import edu.harvard.iq.dataverse.DvObject;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.OneToOne;
+import java.io.Serializable;
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author landreev
+ * 
+ */
+@Entity
+public class StorageQuota implements Serializable {
+    private static final Logger logger = Logger.getLogger(StorageQuota.class.getCanonicalName()); 
+    
+    /**
+     * Only Collection quotas are supported, for now
+     */
+    
+    private static final long serialVersionUID = 1L;
+    @Id
+    @GeneratedValue(strategy = GenerationType.IDENTITY)
+    private Long id;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+    
+    /**
+     * For defining quotas for Users and/or Groups 
+     * (Not supported as of yet)
+     
+    @Column(nullable = true)
+    private String assigneeIdentifier;
+    */
+    
+    /**
+     * Could be changed to ManyToOne if we wanted to be able to define separate
+     * quotas on the same collection for different users. Whether we actually
+     * want to support that is TBD (possibly not). Only collection-wide quotas
+     * are supported for now.
+     */
+    @OneToOne
+    @JoinColumn(name="definitionPoint_id", nullable=true)
+    private DvObject definitionPoint;
+    
+    @Column(nullable = true)
+    private Long allocation; 
+    
+    public StorageQuota() {}
+    
+    /**
+     * Could be uncommented if/when we want to add per-user quotas (see above)
+    public String getAssigneeIdentifier() {
+        return assigneeIdentifier;
+    }
+
+    public void setAssigneeIdentifier(String assigneeIdentifier) {
+        this.assigneeIdentifier = assigneeIdentifier;
+    }*/
+    
+    public DvObject getDefinitionPoint() {
+        return definitionPoint;
+    }
+
+    public void setDefinitionPoint(DvObject definitionPoint) {
+        this.definitionPoint = definitionPoint;
+    }
+    
+    public Long getAllocation() {
+        return allocation; 
+    }
+    
+    public void setAllocation(Long allocation) {
+        this.allocation = allocation; 
+    }
+    
+    @Override
+    public int hashCode() {
+        int hash = 0;
+        hash += (id != null ? id.hashCode() : 0);
+        return hash;
+    }
+
+    @Override
+    public boolean equals(Object object) {
+        // TODO: Warning - this method won't work if the id fields are not set
+        if (!(object instanceof StorageQuota)) {
+            return false;
+        }
+        StorageQuota other = (StorageQuota) object;
+        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        return "edu.harvard.iq.dataverse.storageuse.StorageQuota[ id=" + id + " ]";
+    }
+    
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java
new file mode 100644
index 00000000000..b777736dc8d
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUse.java
@@ -0,0 +1,100 @@
+package edu.harvard.iq.dataverse.storageuse;
+
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.DvObjectContainer;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.Id;
+import jakarta.persistence.Index;
+import jakarta.persistence.JoinColumn;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
+import jakarta.persistence.Table;
+import java.io.Serializable;
+
+/**
+ *
+ * @author landreev
+ */
+@NamedQueries({
+    @NamedQuery(name = "StorageUse.findByteSizeByDvContainerId",query = "SELECT su.sizeInBytes FROM StorageUse su WHERE su.dvObjectContainer.id =:dvObjectId "),
+    @NamedQuery(name = "StorageUse.findByDvContainerId",query = "SELECT su FROM StorageUse su WHERE su.dvObjectContainer.id =:dvObjectId "),
+    @NamedQuery(name = "StorageUse.incrementByteSizeByDvContainerId", query = "UPDATE StorageUse su SET su.sizeInBytes = su.sizeInBytes +:fileSize WHERE su.dvObjectContainer.id =:dvObjectId")
+})
+@Entity
+@Table(indexes = {@Index(columnList="dvobjectcontainer_id")})
+public class StorageUse implements Serializable {
+
+    private static final long serialVersionUID = 1L;
+    @Id
+    @GeneratedValue(strategy = GenerationType.IDENTITY)
+    private Long id;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    @OneToOne
+    @JoinColumn(nullable=false)
+    private DvObject dvObjectContainer; 
+    
+    @Column
+    private Long sizeInBytes = null; 
+    
+    public StorageUse() {}
+    
+    public StorageUse(DvObjectContainer dvObjectContainer) {
+        this(dvObjectContainer, 0L);
+    }
+    
+    public StorageUse(DvObjectContainer dvObjectContainer, Long sizeInBytes) {
+        this.dvObjectContainer = dvObjectContainer;
+        this.sizeInBytes = sizeInBytes;
+    }
+    
+    public Long getSizeInBytes() {
+        return sizeInBytes; 
+    }
+    
+    public void setSizeInBytes(Long sizeInBytes) {
+        this.sizeInBytes = sizeInBytes;
+    }
+    
+    public void incrementSizeInBytes(Long sizeInBytes) {
+        this.sizeInBytes += sizeInBytes; 
+    }
+    
+    
+    @Override
+    public int hashCode() {
+        int hash = 0;
+        hash += (id != null ? id.hashCode() : 0);
+        return hash;
+    }
+
+    @Override
+    public boolean equals(Object object) {
+        // TODO: Warning - this method won't work if the id fields are not set
+        if (!(object instanceof StorageUse)) {
+            return false;
+        }
+        StorageUse other = (StorageUse) object;
+        if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
+            return false;
+        }
+        return true;
+    }
+
+    @Override
+    public String toString() {
+        return "edu.harvard.iq.dataverse.storageuse.StorageUse[ id=" + id + " ]";
+    }
+    
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java
new file mode 100644
index 00000000000..7aea7a7b596
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/StorageUseServiceBean.java
@@ -0,0 +1,72 @@
+package edu.harvard.iq.dataverse.storageuse;
+
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Named;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import java.util.Optional;
+import java.util.logging.Logger;
+
+/**
+ *
+ * @author landreev
+ */
+@Stateless
+@Named
+public class StorageUseServiceBean  implements java.io.Serializable {
+    private static final Logger logger = Logger.getLogger(StorageUseServiceBean.class.getCanonicalName());
+    
+    @PersistenceContext(unitName = "VDCNet-ejbPU")
+    private EntityManager em;
+    
+    public StorageUse findByDvContainerId(Long dvObjectId) {
+        return em.createNamedQuery("StorageUse.findByDvContainerId", StorageUse.class).setParameter("dvObjectId", dvObjectId).getSingleResult();
+    }
+    
+    /**
+     * Looks up the current storage use size, using a named query in a new
+     * transaction.
+     * @param dvObjectId database id of the DvObjectContainer
+     * @return the recorded storage use in bytes
+     */
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    public Long findStorageSizeByDvContainerId(Long dvObjectId) {
+        Long res = em.createNamedQuery("StorageUse.findByteSizeByDvContainerId", Long.class).setParameter("dvObjectId", dvObjectId).getSingleResult();
+        return res == null ? 0L : res;
+    }
+    
+    /**
+     * Increments the recorded storage size for all the dvobject parents of a
+     * datafile, recursively. 
+     * @param dvObjectContainerId database id of the immediate parent (dataset)
+     * @param increment size in bytes of the file(s) being added 
+     */
+    @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
+    public void incrementStorageSizeRecursively(Long dvObjectContainerId, Long increment) {
+        if (dvObjectContainerId != null && increment != null) {
+            Optional<Boolean> allow = JvmSettings.STORAGEUSE_DISABLE_UPDATES.lookupOptional(Boolean.class);
+            if (!(allow.isPresent() && allow.get())) {
+                String queryString = "WITH RECURSIVE uptree (id, owner_id) AS\n"
+                        + "("
+                        + "    SELECT id, owner_id\n"
+                        + "    FROM dvobject\n"
+                        + "    WHERE id=" + dvObjectContainerId + "\n"
+                        + "    UNION ALL\n"
+                        + "    SELECT dvobject.id, dvobject.owner_id\n"
+                        + "    FROM dvobject\n"
+                        + "    JOIN uptree ON dvobject.id = uptree.owner_id)\n"
+                        + "UPDATE storageuse SET sizeinbytes=COALESCE(sizeinbytes,0)+" + increment + "\n"
+                        + "FROM uptree\n"
+                        + "WHERE dvobjectcontainer_id = uptree.id;";
+
+                int parentsUpdated = em.createNativeQuery(queryString).executeUpdate();
+            }
+        }
+        // @todo should we throw an exception if the number of parent dvobjects updated by
+        // the query is < 2?
+    }
+    
+}
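// Illustrative sketch, not part of the patch above: a typical caller after a successful file save.
// The recursive CTE in incrementStorageSizeRecursively() walks the dvobject ownership chain
// (dataset -> collection -> ... -> root) and adds the increment to every ancestor's storageuse row
// in a single statement. The injection point and variable names here are hypothetical.
import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
import jakarta.ejb.EJB;

class IngestFinalizerSketch {
    @EJB
    StorageUseServiceBean storageUseService;

    void onFileSaved(Long datasetId, long newFileSizeInBytes) {
        // runs in its own transaction (REQUIRES_NEW); skipped entirely when
        // dataverse.storageuse.disable-storageuse-increments is set to true
        storageUseService.incrementStorageSizeRecursively(datasetId, newFileSizeInBytes);
    }
}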
diff --git a/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java b/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java
new file mode 100644
index 00000000000..f7dac52e886
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/storageuse/UploadSessionQuotaLimit.java
@@ -0,0 +1,38 @@
+package edu.harvard.iq.dataverse.storageuse;
+
+/**
+ *
+ * @author landreev
+ */
+public class UploadSessionQuotaLimit {
+        private Long totalAllocatedInBytes = 0L; 
+        private Long totalUsageInBytes = 0L;
+        
+        public UploadSessionQuotaLimit(Long allocated, Long used) {
+            this.totalAllocatedInBytes = allocated;
+            this.totalUsageInBytes = used; 
+        }
+        
+        public Long getTotalAllocatedInBytes() {
+            return totalAllocatedInBytes;
+        }
+        
+        public void setTotalAllocatedInBytes(Long totalAllocatedInBytes) {
+            this.totalAllocatedInBytes = totalAllocatedInBytes;
+        }
+        
+        public Long getTotalUsageInBytes() {
+            return totalUsageInBytes;
+        }
+        
+        public void setTotalUsageInBytes(Long totalUsageInBytes) {
+            this.totalUsageInBytes = totalUsageInBytes;
+        }
+        
+        public Long getRemainingQuotaInBytes() {
+            if (totalUsageInBytes > totalAllocatedInBytes) {
+                return 0L; 
+            }
+            return totalAllocatedInBytes - totalUsageInBytes;
+        }
+    }
\ No newline at end of file
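// Illustrative sketch, not part of the patch above: the remaining quota is clamped at zero rather
// than going negative when usage already exceeds the allocation. Values are arbitrary examples.
import edu.harvard.iq.dataverse.storageuse.UploadSessionQuotaLimit;

class QuotaMathSketch {
    void example() {
        UploadSessionQuotaLimit quota = new UploadSessionQuotaLimit(1_000_000L, 1_250_000L);
        long remaining = quota.getRemainingQuotaInBytes();   // 0L, not -250_000L
    }
}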
diff --git a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java
index b132bff9429..6eb3a8df0bc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/timer/DataverseTimerServiceBean.java
@@ -25,17 +25,17 @@
 import java.util.Iterator;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.annotation.PostConstruct;
-import javax.annotation.Resource;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
-import javax.ejb.Timeout;
-import javax.ejb.Timer;
-import javax.ejb.TimerConfig;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.annotation.PostConstruct;
+import jakarta.annotation.Resource;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
+import jakarta.ejb.Timeout;
+import jakarta.ejb.Timer;
+import jakarta.ejb.TimerConfig;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.servlet.http.HttpServletRequest;
 
 
 /**
@@ -55,7 +55,7 @@ public class DataverseTimerServiceBean implements Serializable {
     private static final Logger logger = Logger.getLogger("edu.harvard.iq.dataverse.timer.DataverseTimerServiceBean");
     
     @Resource
-    javax.ejb.TimerService timerService;
+    jakarta.ejb.TimerService timerService;
     @EJB
     HarvesterServiceBean harvesterService;
     @EJB
@@ -109,7 +109,7 @@ public void createTimer(Date initialExpiration, long intervalDuration, Serializa
      */
     @Timeout
     @TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
-    public void handleTimeout(javax.ejb.Timer timer) {
+    public void handleTimeout(jakarta.ejb.Timer timer) {
         // We have to put all the code in a try/catch block because
         // if an exception is thrown from this method, Glassfish will automatically
         // call the method a second time. (The minimum number of re-tries for a Timer method is 1)
diff --git a/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java b/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java
index 07937638607..5e7f532d2ac 100644
--- a/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java
+++ b/src/main/java/edu/harvard/iq/dataverse/userdata/UserListResult.java
@@ -7,14 +7,13 @@
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.mydata.Pager;
-import edu.harvard.iq.dataverse.util.BundleUtil;
+
 import java.util.ArrayList;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
index c143dbaa76f..922e6ff5d28 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/BundleUtil.java
@@ -12,7 +12,7 @@
 import java.util.logging.Logger;
 import java.util.Map;
 import java.util.HashMap;
-import javax.faces.context.FacesContext;
+import jakarta.faces.context.FacesContext;
 
 public class BundleUtil {
 
@@ -23,7 +23,19 @@ public class BundleUtil {
     private static final Map<String, ClassLoader> classLoaderCache = new HashMap<String, ClassLoader>();
 
     public static String getStringFromBundle(String key) {
-        return getStringFromBundle(key, null);
+        return getStringFromBundle(key, (List<String>)null);
+    }
+    
+    public static String getStringFromBundle(String key, Locale locale) {
+        return getStringFromBundle(key, null, locale);
+    }
+
+    private static String getStringFromBundle(String key, List<String> arguments, Locale locale) {
+        ResourceBundle bundle = getResourceBundle(defaultBundleFile, locale);
+        if (bundle == null) {
+            return null;
+        }
+        return getStringFromBundle(key, arguments, bundle);
     }
 
     public static String getStringFromBundle(String key, List<String> arguments) {
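// Illustrative sketch, not part of the patch above: the new Locale-aware overload added here lets
// callers request a message for a specific Locale rather than the locale resolved by the existing
// overloads. The bundle key below is hypothetical.
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.util.Locale;

class LocalizedMessageSketch {
    String frenchLabel() {
        return BundleUtil.getStringFromBundle("dataset.title", Locale.FRENCH);
    }
}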
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java
index 9c1c89430d5..d51f70229a9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/ClockUtil.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
-import javax.enterprise.inject.Produces;
-import javax.inject.Qualifier;
-import javax.inject.Singleton;
+import jakarta.enterprise.inject.Produces;
+import jakarta.inject.Qualifier;
+import jakarta.inject.Singleton;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java
index d2e59fac9f5..1910fde6489 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/ConstraintViolationUtil.java
@@ -5,8 +5,8 @@
  */
 package edu.harvard.iq.dataverse.util;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.ConstraintViolationException;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.ConstraintViolationException;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DataFileComparator.java b/src/main/java/edu/harvard/iq/dataverse/util/DataFileComparator.java
new file mode 100644
index 00000000000..8003fefd635
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/util/DataFileComparator.java
@@ -0,0 +1,129 @@
+package edu.harvard.iq.dataverse.util;
+
+
+import java.util.Comparator;
+import java.util.Date;
+import java.util.Map;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.DataFileCategory;
+import edu.harvard.iq.dataverse.FileMetadata;
+
+/**
+ *
+ * @author qqmyers
+ */
+public class DataFileComparator implements Comparator<FileMetadata> {
+
+    boolean byFolder = false;
+    boolean byCategory = false;
+    String field = "name";
+    boolean ascending = true;
+
+    public Comparator<FileMetadata> compareBy(boolean byFolder, boolean byCategory, String field, boolean ascending) {
+        this.byFolder = byFolder;
+        this.byCategory = byCategory;
+        if(StringUtil.nonEmpty(field)) {
+            this.field = field;
+        }
+        this.ascending = ascending;
+        return this;
+    }
+    
+    public boolean getByFolder() {
+        return this.byFolder;
+    }
+    public int getByCategory() {
+        return FileMetadata.getCategorySortOrder().size();
+    }
+    
+    public String getField() {
+        return this.field;
+    }
+    
+    public boolean getAsc() {
+        return this.ascending;
+    }
+
+    @SuppressWarnings({ "rawtypes", "unchecked" })
+    @Override
+    public int compare(FileMetadata o1, FileMetadata o2) {
+        if (byFolder) {
+            // Compare folders first
+            String folder1 = o1.getDirectoryLabel() == null ? "" : o1.getDirectoryLabel().toUpperCase();
+            String folder2 = o2.getDirectoryLabel() == null ? "" : o2.getDirectoryLabel().toUpperCase();
+
+            if ("".equals(folder1) && !"".equals(folder2)) {
+                return -1;
+            }
+
+            if ("".equals(folder2) && !"".equals(folder1)) {
+                return 1;
+            }
+
+            int comp = folder1.compareTo(folder2);
+            if (comp != 0) {
+                return comp;
+            }
+        }
+        Map<String,Long> categoryMap = FileMetadata.getCategorySortOrder();
+        
+        if (byCategory) {
+            // Then by category if set
+            if (categoryMap != null) {
+                long rank1 = Long.MAX_VALUE;
+                for (DataFileCategory c : o1.getCategories()) {
+                    Long rank = categoryMap.get(c.getName().toUpperCase());
+                    if (rank != null) {
+                        if (rank < rank1) {
+                            rank1 = rank;
+                        }
+                    }
+                }
+                long rank2 = Long.MAX_VALUE;
+                for (DataFileCategory c : o2.getCategories()) {
+                    Long rank = categoryMap.get(c.getName().toUpperCase());
+                    if (rank != null) {
+                        if (rank < rank2) {
+                            rank2 = rank;
+                        }
+                    }
+                }
+                if (rank1 != rank2) {
+                    return rank1 < rank2 ? -1 : 1;
+                }
+            }
+        }
+
+        // Folders are equal and the category ranks (if any) are equal,
+        // so compare by the selected field
+        Comparable file1 = null;
+        Comparable file2 = null;
+        switch (field) {
+        case "date":
+            file1 = getFileDateToCompare(o1);
+            file2 = getFileDateToCompare(o2);
+            break;
+        case "type":
+            file1 = StringUtil.isEmpty(o1.getDataFile().getFriendlyType()) ? "" : o1.getDataFile().getContentType();
+            file2 = StringUtil.isEmpty(o2.getDataFile().getFriendlyType()) ? "" : o2.getDataFile().getContentType();
+            break;
+        case "size":
+            file1 = Long.valueOf(o1.getDataFile().getFilesize());
+            file2 = Long.valueOf(o2.getDataFile().getFilesize());
+            break;
+        default: // "name" or not recognized
+            file1 = o1.getLabel().toUpperCase();
+            file2 = o2.getLabel().toUpperCase();
+
+        }
+        return (ascending ? file1.compareTo(file2) : file2.compareTo(file1));
+    }
+
+    private Date getFileDateToCompare(FileMetadata fileMetadata) {
+        DataFile datafile = fileMetadata.getDataFile();
+        if (datafile.isReleased()) {
+            return datafile.getPublicationDate();
+        }
+        return datafile.getCreateDate();
+    }
+}
\ No newline at end of file
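// Illustrative sketch, not part of the patch above: sorting a list of FileMetadata by folder first,
// then by the configured category order, then by name ascending. The list variable is hypothetical.
import edu.harvard.iq.dataverse.FileMetadata;
import edu.harvard.iq.dataverse.util.DataFileComparator;
import java.util.List;

class FileTableSortSketch {
    void sortForDisplay(List<FileMetadata> fileMetadatas) {
        DataFileComparator dfc = new DataFileComparator();
        fileMetadatas.sort(dfc.compareBy(true, true, "name", true));
    }
}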
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java
index 800c05ae6dc..62cd318706f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/DataSourceProducer.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import javax.annotation.Resource;
-import javax.annotation.sql.DataSourceDefinition;
-import javax.enterprise.inject.Produces;
-import javax.inject.Singleton;
+import jakarta.annotation.Resource;
+import jakarta.annotation.sql.DataSourceDefinition;
+import jakarta.enterprise.inject.Produces;
+import jakarta.inject.Singleton;
 import javax.sql.DataSource;
 
-// Find docs here: https://javaee.github.io/javaee-spec/javadocs/javax/annotation/sql/DataSourceDefinition.html
+// Find docs here: https://jakarta.ee/specifications/annotations/2.1/apidocs/jakarta.annotation/jakarta/annotation/sql/datasourcedefinition
 @Singleton
 @DataSourceDefinition(
         name = "java:app/jdbc/dataverse",
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java b/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java
index df97998d9e8..25032860d11 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/DatasetFieldWalker.java
@@ -3,7 +3,6 @@
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
-import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetFieldValue;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -12,12 +11,10 @@
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 
 /**
  * A means of iterating over {@link DatasetField}s, or a collection of them.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java
index cf337b0a020..fd8c8fa4a9c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/EjbUtil.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.util;
 
-import javax.ejb.EJBException;
+import jakarta.ejb.EJBException;
 
 public class EjbUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index c600abfd409..776d04e98cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -28,18 +28,22 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Embargo;
 import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.dataaccess.S3AccessIO;
 import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
 import edu.harvard.iq.dataverse.dataset.DatasetUtil;
 import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT;
 import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable;
 import edu.harvard.iq.dataverse.ingest.IngestReport;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper;
 import edu.harvard.iq.dataverse.ingest.IngestableDataChecker;
 import edu.harvard.iq.dataverse.license.License;
+import edu.harvard.iq.dataverse.settings.ConfigCheckService;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.file.BagItFileHandler;
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
@@ -64,8 +68,6 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.nio.channels.FileChannel;
-import java.nio.channels.WritableByteChannel;
 import java.nio.charset.Charset;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -87,25 +89,22 @@
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.activation.MimetypesFileTypeMap;
-import javax.ejb.EJBException;
-import javax.enterprise.inject.spi.CDI;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
+import jakarta.activation.MimetypesFileTypeMap;
+import jakarta.ejb.EJBException;
+import jakarta.enterprise.inject.spi.CDI;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
 import javax.xml.stream.XMLStreamConstants;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
 
-import org.apache.commons.io.FileUtils;
 
 import java.util.zip.GZIPInputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
 import org.apache.commons.io.FilenameUtils;
 
 import edu.harvard.iq.dataverse.dataaccess.DataAccessOption;
 import edu.harvard.iq.dataverse.dataaccess.StorageIO;
-import edu.harvard.iq.dataverse.datasetutility.FileSizeChecker;
+import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException;
 import java.util.Arrays;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -180,6 +179,10 @@ public class FileUtil implements java.io.Serializable  {
     //Todo - this is the same as MIME_TYPE_TSV_ALT
     public static final String MIME_TYPE_INGESTED_FILE = "text/tab-separated-values";
 
+    public static final String MIME_TYPE_NETCDF = "application/netcdf";
+    public static final String MIME_TYPE_XNETCDF = "application/x-netcdf";
+    public static final String MIME_TYPE_HDF5 = "application/x-hdf5";
+
     // File type "thumbnail classes" tags:
     
     public static final String FILE_THUMBNAIL_CLASS_AUDIO = "audio";
@@ -246,26 +249,6 @@ public class FileUtil implements java.io.Serializable  {
     public FileUtil() {
     }
     
-    public static void copyFile(File inputFile, File outputFile) throws IOException {
-        FileChannel in = null;
-        WritableByteChannel out = null;
-        
-        try {
-            in = new FileInputStream(inputFile).getChannel();
-            out = new FileOutputStream(outputFile).getChannel();
-            long bytesPerIteration = 50000;
-            long start = 0;
-            while ( start < in.size() ) {
-                in.transferTo(start, bytesPerIteration, out);
-                start += bytesPerIteration;
-            }
-            
-        } finally {
-            if (in != null) { in.close(); }
-            if (out != null) { out.close(); }
-        }
-    }
-
    
     public static String getFileExtension(String fileName){
         String ext = null;
@@ -411,7 +394,7 @@ public static String getUserFriendlyOriginalType(DataFile dataFile) {
      *  Returns a content type string for a FileObject
      * 
      */
-    private static String determineContentType(File fileObject) {
+    public static String determineContentType(File fileObject) {
         if (fileObject==null){
             return null;
         }
@@ -471,7 +454,7 @@ public static String determineFileType(File f, String fileName) throws IOExcepti
             }
         }
 
-        // step 3: Check if NetCDF or HDF5
+        // step 3a: Check if NetCDF or HDF5
         if (fileType == null) {
             fileType = checkNetcdfOrHdf5(f);
         }
@@ -602,12 +585,11 @@ public static String lookupFileTypeFromPropertiesFile(String fileName) {
      * -- L.A. 4.0 alpha
     */
     private static boolean isFITSFile(File file) {
-        BufferedInputStream ins = null;
 
-        try {
-            ins = new BufferedInputStream(new FileInputStream(file));
+        try (BufferedInputStream ins = new BufferedInputStream(new FileInputStream(file))) {
             return isFITSFile(ins);
         } catch (IOException ex) {
+            logger.fine("IOException: "+ ex.getMessage());
         } 
         
         return false;
@@ -648,8 +630,9 @@ private static boolean isFITSFile(InputStream ins) {
     private static boolean isGraphMLFile(File file) {
         boolean isGraphML = false;
         logger.fine("begin isGraphMLFile()");
+        FileReader fileReader = null;
         try{
-            FileReader fileReader = new FileReader(file);
+            fileReader = new FileReader(file);
             javax.xml.stream.XMLInputFactory xmlif = javax.xml.stream.XMLInputFactory.newInstance();
             xmlif.setProperty("javax.xml.stream.isCoalescing", java.lang.Boolean.TRUE);
 
@@ -672,6 +655,14 @@ private static boolean isGraphMLFile(File file) {
             isGraphML = false;
         } catch(IOException e) {
             throw new EJBException(e);
+        } finally {
+            if (fileReader != null) {
+                try {
+                    fileReader.close();
+                } catch (IOException ioex) {
+                    logger.warning("IOException closing file reader in GraphML type checker");
+                }
+            }
         }
         logger.fine("end isGraphML()");
         return isGraphML;
@@ -803,436 +794,6 @@ public static String generateOriginalExtension(String fileType) {
         }
         return "";
     }
-    
-    public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream,
-            String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum,
-            SystemConfig systemConfig)  throws IOException {
-        ChecksumType checkSumType = DataFile.ChecksumType.MD5;
-        if (newStorageIdentifier == null) {
-            checkSumType = systemConfig.getFileFixityChecksumAlgorithm();
-        }
-        return createDataFiles(version, inputStream, fileName, suppliedContentType, newStorageIdentifier, newCheckSum, checkSumType, systemConfig);
-    }
-    
-    public static CreateDataFileResult createDataFiles(DatasetVersion version, InputStream inputStream, String fileName, String suppliedContentType, String newStorageIdentifier, String newCheckSum, ChecksumType newCheckSumType, SystemConfig systemConfig) throws IOException {
-        List<DataFile> datafiles = new ArrayList<>();
-
-        //When there is no checksum/checksumtype being sent (normal upload, needs to be calculated), set the type to the current default
-        if(newCheckSumType == null) {
-            newCheckSumType = systemConfig.getFileFixityChecksumAlgorithm();
-        }
-
-        String warningMessage = null;
-
-        // save the file, in the temporary location for now: 
-        Path tempFile = null;
-
-        Long fileSizeLimit = systemConfig.getMaxFileUploadSizeForStore(version.getDataset().getEffectiveStorageDriverId());
-        String finalType = null;
-        if (newStorageIdentifier == null) {
-            if (getFilesTempDirectory() != null) {
-                tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload");
-                // "temporary" location is the key here; this is why we are not using
-                // the DataStore framework for this - the assumption is that
-                // temp files will always be stored on the local filesystem.
-                // -- L.A. Jul. 2014
-                logger.fine("Will attempt to save the file as: " + tempFile.toString());
-                Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING);
-
-                // A file size check, before we do anything else:
-                // (note that "no size limit set" = "unlimited")
-                // (also note, that if this is a zip file, we'll be checking
-                // the size limit for each of the individual unpacked files)
-                Long fileSize = tempFile.toFile().length();
-                if (fileSizeLimit != null && fileSize > fileSizeLimit) {
-                    try {
-                        tempFile.toFile().delete();
-                    } catch (Exception ex) {
-                    }
-                    throw new IOException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)));
-                }
-
-            } else {
-                throw new IOException("Temp directory is not configured.");
-            }
-            logger.fine("mime type supplied: " + suppliedContentType);
-            // Let's try our own utilities (Jhove, etc.) to determine the file type
-            // of the uploaded file. (We may already have a mime type supplied for this
-            // file - maybe the type that the browser recognized on upload; or, if
-            // it's a harvest, maybe the remote server has already given us the type
-            // for this file... with our own type utility we may or may not do better
-            // than the type supplied:
-            // -- L.A.
-            String recognizedType = null;
-
-            try {
-                recognizedType = determineFileType(tempFile.toFile(), fileName);
-                logger.fine("File utility recognized the file as " + recognizedType);
-                if (recognizedType != null && !recognizedType.equals("")) {
-                    if (useRecognizedType(suppliedContentType, recognizedType)) {
-                        finalType = recognizedType;
-                    }
-                }
-
-            } catch (Exception ex) {
-                logger.warning("Failed to run the file utility mime type check on file " + fileName);
-            }
-
-            if (finalType == null) {
-                finalType = (suppliedContentType == null || suppliedContentType.equals(""))
-                        ? MIME_TYPE_UNDETERMINED_DEFAULT
-                        : suppliedContentType;
-            }
-
-            // A few special cases:
-            // if this is a gzipped FITS file, we'll uncompress it, and ingest it as
-            // a regular FITS file:
-            if (finalType.equals("application/fits-gzipped")) {
-
-                InputStream uncompressedIn = null;
-                String finalFileName = fileName;
-                // if the file name had the ".gz" extension, remove it,
-                // since we are going to uncompress it:
-                if (fileName != null && fileName.matches(".*\\.gz$")) {
-                    finalFileName = fileName.replaceAll("\\.gz$", "");
-                }
-
-                DataFile datafile = null;
-                try {
-                    uncompressedIn = new GZIPInputStream(new FileInputStream(tempFile.toFile()));
-                    File unZippedTempFile = saveInputStreamInTempFile(uncompressedIn, fileSizeLimit);
-                    datafile = createSingleDataFile(version, unZippedTempFile, finalFileName, MIME_TYPE_UNDETERMINED_DEFAULT, systemConfig.getFileFixityChecksumAlgorithm());
-                } catch (IOException | FileExceedsMaxSizeException ioex) {
-                    datafile = null;
-                } finally {
-                    if (uncompressedIn != null) {
-                        try {
-                            uncompressedIn.close();
-                        } catch (IOException e) {
-                        }
-                    }
-                }
-
-                // If we were able to produce an uncompressed file, we'll use it
-                // to create and return a final DataFile; if not, we're not going
-                // to do anything - and then a new DataFile will be created further
-                // down, from the original, uncompressed file.
-                if (datafile != null) {
-                    // remove the compressed temp file:
-                    try {
-                        tempFile.toFile().delete();
-                    } catch (SecurityException ex) {
-                        // (this is very non-fatal)
-                        logger.warning("Failed to delete temporary file " + tempFile.toString());
-                    }
-
-                    datafiles.add(datafile);
-                    return CreateDataFileResult.success(fileName, finalType, datafiles);
-                }
-
-                // If it's a ZIP file, we are going to unpack it and create multiple
-                // DataFile objects from its contents:
-            } else if (finalType.equals("application/zip")) {
-
-                ZipInputStream unZippedIn = null;
-                ZipEntry zipEntry = null;
-
-                int fileNumberLimit = systemConfig.getZipUploadFilesLimit();
-
-                try {
-                    Charset charset = null;
-                    /*
-                	TODO: (?)
-                	We may want to investigate somehow letting the user specify 
-                	the charset for the filenames in the zip file...
-                    - otherwise, ZipInputStream bails out if it encounteres a file 
-                	name that's not valid in the current charest (i.e., UTF-8, in 
-                    our case). It would be a bit trickier than what we're doing for 
-                    SPSS tabular ingests - with the lang. encoding pulldown menu - 
-                	because this encoding needs to be specified *before* we upload and
-                    attempt to unzip the file. 
-                	        -- L.A. 4.0 beta12
-                	logger.info("default charset is "+Charset.defaultCharset().name());
-                	if (Charset.isSupported("US-ASCII")) {
-                    	logger.info("charset US-ASCII is supported.");
-                    	charset = Charset.forName("US-ASCII");
-                    	if (charset != null) {
-                       	    logger.info("was able to obtain charset for US-ASCII");
-                    	}
-                    
-                	 }
-                     */
-
-                    if (charset != null) {
-                        unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()), charset);
-                    } else {
-                        unZippedIn = new ZipInputStream(new FileInputStream(tempFile.toFile()));
-                    }
-
-                    while (true) {
-                        try {
-                            zipEntry = unZippedIn.getNextEntry();
-                        } catch (IllegalArgumentException iaex) {
-                            // Note:
-                            // ZipInputStream documentation doesn't even mention that
-                            // getNextEntry() throws an IllegalArgumentException!
-                            // but that's what happens if the file name of the next
-                            // entry is not valid in the current CharSet.
-                            // -- L.A.
-                            warningMessage = "Failed to unpack Zip file. (Unknown Character Set used in a file name?) Saving the file as is.";
-                            logger.warning(warningMessage);
-                            throw new IOException();
-                        }
-
-                        if (zipEntry == null) {
-                            break;
-                        }
-                        // Note that some zip entries may be directories - we
-                        // simply skip them:
-
-                        if (!zipEntry.isDirectory()) {
-                            if (datafiles.size() > fileNumberLimit) {
-                                logger.warning("Zip upload - too many files.");
-                                warningMessage = "The number of files in the zip archive is over the limit (" + fileNumberLimit
-                                        + "); please upload a zip archive with fewer files, if you want them to be ingested "
-                                        + "as individual DataFiles.";
-                                throw new IOException();
-                            }
-
-                            String fileEntryName = zipEntry.getName();
-                            logger.fine("ZipEntry, file: " + fileEntryName);
-
-                            if (fileEntryName != null && !fileEntryName.equals("")) {
-
-                                String shortName = fileEntryName.replaceFirst("^.*[\\/]", "");
-
-                                // Check if it's a "fake" file - a zip archive entry
-                                // created for a MacOS X filesystem element: (these
-                                // start with "._")
-                                if (!shortName.startsWith("._") && !shortName.startsWith(".DS_Store") && !"".equals(shortName)) {
-                                    // OK, this seems like an OK file entry - we'll try
-                                    // to read it and create a DataFile with it:
-
-                                    File unZippedTempFile = saveInputStreamInTempFile(unZippedIn, fileSizeLimit);
-                                    DataFile datafile = createSingleDataFile(version, unZippedTempFile, null, shortName,
-                                            MIME_TYPE_UNDETERMINED_DEFAULT,
-                                            systemConfig.getFileFixityChecksumAlgorithm(), null, false);
-
-                                    if (!fileEntryName.equals(shortName)) {
-                                        // If the filename looks like a hierarchical folder name (i.e., contains slashes and backslashes),
-                                        // we'll extract the directory name; then subject it to some "aggressive sanitizing" - strip all 
-                                        // the leading, trailing and duplicate slashes; then replace all the characters that 
-                                        // don't pass our validation rules.
-                                        String directoryName = fileEntryName.replaceFirst("[\\\\/][\\\\/]*[^\\\\/]*$", "");
-                                        directoryName = StringUtil.sanitizeFileDirectory(directoryName, true);
-                                        // if (!"".equals(directoryName)) {
-                                        if (!StringUtil.isEmpty(directoryName)) {
-                                            logger.fine("setting the directory label to " + directoryName);
-                                            datafile.getFileMetadata().setDirectoryLabel(directoryName);
-                                        }
-                                    }
-
-                                    if (datafile != null) {
-                                        // We have created this datafile with the mime type "unknown";
-                                        // Now that we have it saved in a temporary location,
-                                        // let's try and determine its real type:
-
-                                        String tempFileName = getFilesTempDirectory() + "/" + datafile.getStorageIdentifier();
-
-                                        try {
-                                            recognizedType = determineFileType(new File(tempFileName), shortName);
-                                            logger.fine("File utility recognized unzipped file as " + recognizedType);
-                                            if (recognizedType != null && !recognizedType.equals("")) {
-                                                datafile.setContentType(recognizedType);
-                                            }
-                                        } catch (Exception ex) {
-                                            logger.warning("Failed to run the file utility mime type check on file " + fileName);
-                                        }
-
-                                        datafiles.add(datafile);
-                                    }
-                                }
-                            }
-                        }
-                        unZippedIn.closeEntry();
-
-                    }
-
-                } catch (IOException ioex) {
-                    // just clear the datafiles list and let
-                    // ingest default to creating a single DataFile out
-                    // of the unzipped file.
-                    logger.warning("Unzipping failed; rolling back to saving the file as is.");
-                    if (warningMessage == null) {
-                        warningMessage = BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed");
-                    }
-
-                    datafiles.clear();
-                } catch (FileExceedsMaxSizeException femsx) {
-                    logger.warning("One of the unzipped files exceeds the size limit; resorting to saving the file as is. " + femsx.getMessage());
-                    warningMessage =  BundleUtil.getStringFromBundle("file.addreplace.warning.unzip.failed.size", Arrays.asList(FileSizeChecker.bytesToHumanReadable(fileSizeLimit)));
-                    datafiles.clear();
-                } finally {
-                    if (unZippedIn != null) {
-                        try {
-                            unZippedIn.close();
-                        } catch (Exception zEx) {
-                        }
-                    }
-                }
-                if (datafiles.size() > 0) {
-                    // link the data files to the dataset/version:
-                    // (except we no longer want to do this! -- 4.6)
-                    /*Iterator<DataFile> itf = datafiles.iterator();
-                	while (itf.hasNext()) {
-                    	DataFile datafile = itf.next();
-                    	datafile.setOwner(version.getDataset());
-                        if (version.getFileMetadatas() == null) {
-                        	version.setFileMetadatas(new ArrayList());
-                        }
-                    	version.getFileMetadatas().add(datafile.getFileMetadata());
-                    	datafile.getFileMetadata().setDatasetVersion(version);
-                    
-                    	version.getDataset().getFiles().add(datafile);
-                	} */
-                    // remove the uploaded zip file:
-                    try {
-                        Files.delete(tempFile);
-                    } catch (IOException ioex) {
-                        // do nothing - it's just a temp file.
-                        logger.warning("Could not remove temp file " + tempFile.getFileName().toString());
-                    }
-                    // and return:
-                    return CreateDataFileResult.success(fileName, finalType, datafiles);
-                }
-
-            } else if (finalType.equalsIgnoreCase(ShapefileHandler.SHAPEFILE_FILE_TYPE)) {
-                // Shape files may have to be split into multiple files,
-                // one zip archive per each complete set of shape files:
-
-                // File rezipFolder = new File(this.getFilesTempDirectory());
-                File rezipFolder = getShapefileUnzipTempDirectory();
-
-                IngestServiceShapefileHelper shpIngestHelper;
-                shpIngestHelper = new IngestServiceShapefileHelper(tempFile.toFile(), rezipFolder);
-
-                boolean didProcessWork = shpIngestHelper.processFile();
-                if (!(didProcessWork)) {
-                    logger.severe("Processing of zipped shapefile failed.");
-                    return CreateDataFileResult.error(fileName, finalType);
-                }
-
-                try {
-                    for (File finalFile : shpIngestHelper.getFinalRezippedFiles()) {
-                        FileInputStream finalFileInputStream = new FileInputStream(finalFile);
-                        finalType = determineContentType(finalFile);
-                        if (finalType == null) {
-                            logger.warning("Content type is null; but should default to 'MIME_TYPE_UNDETERMINED_DEFAULT'");
-                            continue;
-                        }
-
-                        File unZippedShapeTempFile = saveInputStreamInTempFile(finalFileInputStream, fileSizeLimit);
-                        DataFile new_datafile = createSingleDataFile(version, unZippedShapeTempFile, finalFile.getName(), finalType, systemConfig.getFileFixityChecksumAlgorithm());
-                        String directoryName = null;
-                        String absolutePathName = finalFile.getParent();
-                        if (absolutePathName != null) {
-                            if (absolutePathName.length() > rezipFolder.toString().length()) {
-                                // This file lives in a subfolder - we want to 
-                                // preserve it in the FileMetadata:
-                                directoryName = absolutePathName.substring(rezipFolder.toString().length() + 1);
-
-                                if (!StringUtil.isEmpty(directoryName)) {
-                                    new_datafile.getFileMetadata().setDirectoryLabel(directoryName);
-                                }
-                            }
-                        }
-                        if (new_datafile != null) {
-                            datafiles.add(new_datafile);
-                        } else {
-                            logger.severe("Could not add part of rezipped shapefile. new_datafile was null: " + finalFile.getName());
-                        }
-                        finalFileInputStream.close();
-
-                    }
-                } catch (FileExceedsMaxSizeException femsx) {
-                    logger.severe("One of the unzipped shape files exceeded the size limit; giving up. " + femsx.getMessage());
-                    datafiles.clear();
-                }
-
-                // Delete the temp directory used for unzipping
-                // The try-catch is due to error encountered in using NFS for stocking file,
-                // cf. https://github.com/IQSS/dataverse/issues/5909
-                try {
-                    FileUtils.deleteDirectory(rezipFolder);
-                } catch (IOException ioex) {
-                    // do nothing - it's a tempo folder.
-                    logger.warning("Could not remove temp folder, error message : " + ioex.getMessage());
-                }
-
-                if (datafiles.size() > 0) {
-                    // remove the uploaded zip file:
-                    try {
-                        Files.delete(tempFile);
-                    } catch (IOException ioex) {
-                        // do nothing - it's just a temp file.
-                        logger.warning("Could not remove temp file " + tempFile.getFileName().toString());
-                    } catch (SecurityException se) {
-                        logger.warning("Unable to delete: " + tempFile.toString() + "due to Security Exception: "
-                                + se.getMessage());
-                    }
-                    return CreateDataFileResult.success(fileName, finalType, datafiles);
-                } else {
-                    logger.severe("No files added from directory of rezipped shapefiles");
-                }
-                return CreateDataFileResult.error(fileName, finalType);
-
-            } else if (finalType.equalsIgnoreCase(BagItFileHandler.FILE_TYPE)) {
-                Optional<BagItFileHandler> bagItFileHandler = CDI.current().select(BagItFileHandlerFactory.class).get().getBagItFileHandler();
-                if (bagItFileHandler.isPresent()) {
-                    CreateDataFileResult result = bagItFileHandler.get().handleBagItPackage(systemConfig, version, fileName, tempFile.toFile());
-                    return result;
-                }
-            }
-        } else {
-            // Default to suppliedContentType if set or the overall undetermined default if a contenttype isn't supplied
-            finalType = StringUtils.isBlank(suppliedContentType) ? FileUtil.MIME_TYPE_UNDETERMINED_DEFAULT : suppliedContentType;
-            String type = determineFileTypeByNameAndExtension(fileName);
-            if (!StringUtils.isBlank(type)) {
-                //Use rules for deciding when to trust browser supplied type
-                if (useRecognizedType(finalType, type)) {
-                    finalType = type;
-                }
-                logger.fine("Supplied type: " + suppliedContentType + ", finalType: " + finalType);
-            }
-        }
-        // Finally, if none of the special cases above were applicable (or 
-        // if we were unable to unpack an uploaded file, etc.), we'll just 
-        // create and return a single DataFile:
-        File newFile = null;
-        if (tempFile != null) {
-            newFile = tempFile.toFile();
-        }
-        
-
-        DataFile datafile = createSingleDataFile(version, newFile, newStorageIdentifier, fileName, finalType, newCheckSumType, newCheckSum);
-        File f = null;
-        if (tempFile != null) {
-            f = tempFile.toFile();
-        }
-        if (datafile != null && ((f != null) || (newStorageIdentifier != null))) {
-
-            if (warningMessage != null) {
-                createIngestFailureReport(datafile, warningMessage);
-                datafile.SetIngestProblem();
-            }
-            datafiles.add(datafile);
-
-            return CreateDataFileResult.success(fileName, finalType, datafiles);
-        }
-
-        return CreateDataFileResult.error(fileName, finalType);
-    }   // end createDataFiles
-    
 
 	public static boolean useRecognizedType(String suppliedContentType, String recognizedType) {
 		// is it any better than the type that was supplied to us,
@@ -1271,7 +832,12 @@ public static boolean useRecognizedType(String suppliedContentType, String recog
 	}
 
 	public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit)
-            throws IOException, FileExceedsMaxSizeException {
+            throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException {
+        return saveInputStreamInTempFile(inputStream, fileSizeLimit, null);
+    }
+
+    public static File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit, Long storageQuotaLimit)
+            throws IOException, FileExceedsMaxSizeException, FileExceedsStorageQuotaException {
         Path tempFile = Files.createTempFile(Paths.get(getFilesTempDirectory()), "tmp", "upload");
         
         if (inputStream != null && tempFile != null) {
@@ -1282,7 +848,12 @@ public static File saveInputStreamInTempFile(InputStream inputStream, Long fileS
             Long fileSize = tempFile.toFile().length();
             if (fileSizeLimit != null && fileSize > fileSizeLimit) {
                 try {tempFile.toFile().delete();} catch (Exception ex) {}
-                throw new FileExceedsMaxSizeException (MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)));  
+                throw new FileExceedsMaxSizeException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.file_exceeds_limit"), bytesToHumanReadable(fileSize), bytesToHumanReadable(fileSizeLimit)));  
+            }
+            
+            if (storageQuotaLimit != null && fileSize > storageQuotaLimit) {
+                try {tempFile.toFile().delete();} catch (Exception ex) {}
+                throw new FileExceedsStorageQuotaException(MessageFormat.format(BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded"), bytesToHumanReadable(fileSize), bytesToHumanReadable(storageQuotaLimit)));  
             }
             
             return tempFile.toFile();
@@ -1325,7 +896,6 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil
         datafile.setPermissionModificationTime(new Timestamp(new Date().getTime()));
         FileMetadata fmd = new FileMetadata();
 
-        // TODO: add directoryLabel?
         fmd.setLabel(fileName);
 
         if (addToDataset) {
@@ -1341,13 +911,13 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil
             fmd.setDatasetVersion(version);
             version.getDataset().getFiles().add(datafile);
         }
-        if(storageIdentifier==null) {
-        generateStorageIdentifier(datafile);
-        if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) {
-            return null;
-        }
+        if (storageIdentifier == null) {
+            generateStorageIdentifier(datafile);
+            if (!tempFile.renameTo(new File(getFilesTempDirectory() + "/" + datafile.getStorageIdentifier()))) {
+                return null;
+            }
         } else {
-        	datafile.setStorageIdentifier(storageIdentifier);
+            datafile.setStorageIdentifier(storageIdentifier);
         }
 
         if ((checksum !=null)&&(!checksum.isEmpty())) {
@@ -1372,7 +942,7 @@ public static DataFile createSingleDataFile(DatasetVersion version, File tempFil
     
         Naming convention: getFilesTempDirectory() + "shp_" + "yyyy-MM-dd-hh-mm-ss-SSS"
     */
-    private static File getShapefileUnzipTempDirectory(){
+    public static File getShapefileUnzipTempDirectory(){
         
         String tempDirectory = getFilesTempDirectory();
         if (tempDirectory == null){
@@ -1436,25 +1006,17 @@ public static boolean canIngestAsTabular(String mimeType) {
         }
     }
     
+    /**
+     * Return the location where data should be stored temporarily after uploading (UI or API)
+     * for local processing (ingest, unzip, ...) and transfer to the final destination (see the storage subsystem).
+     *
+     * This location is verified to be configured, to exist, and to be writable via
+     * {@link ConfigCheckService#checkSystemDirectories()}.
+     *
+     * @return String with a path to the temporary location. Will not be null (former versions returned null to indicate failure).
+     */
     public static String getFilesTempDirectory() {
-        
-        String filesRootDirectory = JvmSettings.FILES_DIRECTORY.lookup();
-        String filesTempDirectory = filesRootDirectory + "/temp";
-
-        if (!Files.exists(Paths.get(filesTempDirectory))) {
-            /* Note that "createDirectories()" must be used - not 
-             * "createDirectory()", to make sure all the parent 
-             * directories that may not yet exist are created as well. 
-             */
-            try {
-                Files.createDirectories(Paths.get(filesTempDirectory));
-            } catch (IOException ex) {
-                logger.severe("Failed to create filesTempDirectory: " + filesTempDirectory );
-                return null;
-            }
-        }
-
-        return filesTempDirectory;
+        return JvmSettings.FILES_DIRECTORY.lookup() + File.separator + "temp";
     }
     
     public static void generateS3PackageStorageIdentifier(DataFile dataFile) {
@@ -1556,6 +1118,11 @@ public static boolean isRequestAccessPopupRequired(DatasetVersion datasetVersion
         if (answer != null) {
             return answer;
         }
+        // 3. Guest Book:
+        if (datasetVersion.getDataset() != null && datasetVersion.getDataset().getGuestbook() != null && datasetVersion.getDataset().getGuestbook().isEnabled() && datasetVersion.getDataset().getGuestbook().getDataverse() != null) {
+            logger.fine("Request access popup required because of guestbook.");
+            return true;
+        }
         logger.fine("Request access popup is not required.");
         return false;
     }
@@ -1597,6 +1164,71 @@ private static Boolean popupDueToStateOrTerms(DatasetVersion datasetVersion) {
         return null;
     }
 
+    /**
+     * isGuestbookAndTermsPopupRequired
+     * Meant to replace both isDownloadPopupRequired() and isRequestAccessDownloadPopupRequired() now that guestbook-terms-popup-fragment.xhtml
+     * has replaced file-download-popup-fragment.xhtml and file-request-access-popup-fragment.xhtml.
+     * @param datasetVersion the dataset version being accessed
+     * @return true if either the guestbook popup or the terms popup is required
+     */
+
+    public static boolean isGuestbookAndTermsPopupRequired(DatasetVersion datasetVersion) {
+        return isGuestbookPopupRequired(datasetVersion) || isTermsPopupRequired(datasetVersion);
+    }
+
+    public static boolean isGuestbookPopupRequired(DatasetVersion datasetVersion) {
+
+        if (datasetVersion == null) {
+            logger.fine("GuestbookPopup not required because datasetVersion is null.");
+            return false;
+        }
+        //0. if version is draft then Popup "not required"
+        if (!datasetVersion.isReleased()) {
+            logger.fine("GuestbookPopup not required because datasetVersion has not been released.");
+            return false;
+        }
+
+        // 3. Guest Book:
+        if (datasetVersion.getDataset() != null && datasetVersion.getDataset().getGuestbook() != null && datasetVersion.getDataset().getGuestbook().isEnabled() && datasetVersion.getDataset().getGuestbook().getDataverse() != null) {
+            logger.fine("GuestbookPopup required because an enabled guestbook exists.");
+            return true;
+        }
+
+        logger.fine("GuestbookPopup is not required.");
+        return false;
+    }
+
+    public static boolean isTermsPopupRequired(DatasetVersion datasetVersion) {
+
+        if (datasetVersion == null) {
+            logger.fine("TermsPopup not required because datasetVersion is null.");
+            return false;
+        }
+        //0. if version is draft then Popup "not required"
+        if (!datasetVersion.isReleased()) {
+            logger.fine("TermsPopup not required because datasetVersion has not been released.");
+            return false;
+        }
+        // 1. License and Terms of Use:
+        if (datasetVersion.getTermsOfUseAndAccess() != null) {
+            if (!License.CC0.equals(datasetVersion.getTermsOfUseAndAccess().getLicense())
+                    && !(datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null
+                    || datasetVersion.getTermsOfUseAndAccess().getTermsOfUse().equals(""))) {
+                logger.fine("TermsPopup required because of license or terms of use.");
+                return true;
+            }
+
+            // 2. Terms of Access:
+            if (!(datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess() == null) && !datasetVersion.getTermsOfUseAndAccess().getTermsOfAccess().equals("")) {
+                logger.fine("TermsPopup required because of terms of access.");
+                return true;
+            }
+        }
+
+        logger.fine("TermsPopup is not required.");
+        return false;
+    }
+    
     /**
      * Provide download URL if no Terms of Use, no guestbook, and not
      * restricted.
@@ -1817,6 +1449,17 @@ public static S3AccessIO getS3AccessForDirectUpload(Dataset dataset) {
     	return s3io;
     }
     
+    private static InputStream getOriginalFileInputStream(StorageIO<DataFile> storage, boolean isTabularData) throws IOException {
+        storage.open(DataAccessOption.READ_ACCESS);
+        if (!isTabularData) {
+            return storage.getInputStream();
+        } else {
+            // if this is a tabular file, read the preserved original "auxiliary file"
+            // instead:
+            return storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
+        }
+    }
+
     public static void validateDataFileChecksum(DataFile dataFile) throws IOException {
         DataFile.ChecksumType checksumType = dataFile.getChecksumType();
         if (checksumType == null) {
@@ -1826,35 +1469,24 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio
         }
 
         StorageIO<DataFile> storage = dataFile.getStorageIO();
-        InputStream in = null;
-
-        try {
-            storage.open(DataAccessOption.READ_ACCESS);
+        String recalculatedChecksum = null;
 
-            if (!dataFile.isTabularData()) {
-                in = storage.getInputStream();
-            } else {
-                // if this is a tabular file, read the preserved original "auxiliary file"
-                // instead:
-                in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
-            }
+        try (InputStream inputStream = getOriginalFileInputStream(storage, dataFile.isTabularData())) {
+            recalculatedChecksum = FileUtil.calculateChecksum(inputStream, checksumType);
         } catch (IOException ioex) {
-            in = null;
-        }
-
-        if (in == null) {
             String info = BundleUtil.getStringFromBundle("dataset.publish.file.validation.error.failRead", Arrays.asList(dataFile.getId().toString()));
             logger.log(Level.INFO, info);
             throw new IOException(info);
-        }
-
-        String recalculatedChecksum = null;
-        try {
-            recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType);
         } catch (RuntimeException rte) {
+            logger.log(Level.SEVERE, "failed to calculate checksum, will retry once", rte);
             recalculatedChecksum = null;
-        } finally {
-            IOUtils.closeQuietly(in);
+        }
+
+        if (recalculatedChecksum == null) { //retry once
+            storage = dataFile.getStorageIO();
+            try (InputStream inputStream = getOriginalFileInputStream(storage, dataFile.isTabularData())) {
+                recalculatedChecksum = FileUtil.calculateChecksum(inputStream, checksumType);
+            }
         }
 
         if (recalculatedChecksum == null) {
@@ -1872,19 +1504,12 @@ public static void validateDataFileChecksum(DataFile dataFile) throws IOExceptio
             boolean fixed = false;
             if (!dataFile.isTabularData() && dataFile.getIngestReport() != null) {
                 // try again, see if the .orig file happens to be there:
-                try {
-                    in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION);
-                } catch (IOException ioex) {
-                    in = null;
+                try (InputStream in = storage.getAuxFileAsInputStream(FileUtil.SAVED_ORIGINAL_FILENAME_EXTENSION)) {
+                    recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType);
+                } catch (RuntimeException rte) {
+                    recalculatedChecksum = null;
                 }
-                if (in != null) {
-                    try {
-                        recalculatedChecksum = FileUtil.calculateChecksum(in, checksumType);
-                    } catch (RuntimeException rte) {
-                        recalculatedChecksum = null;
-                    } finally {
-                        IOUtils.closeQuietly(in);
-                    }
+                if (recalculatedChecksum != null) {
                     // try again:
                     if (recalculatedChecksum.equals(dataFile.getChecksumValue())) {
                         fixed = true;
@@ -2108,7 +1733,7 @@ private static String getFileAccessUrl(FileMetadata fileMetadata, String apiLoca
     private static String getFolderAccessUrl(DatasetVersion version, String currentFolder, String subFolder, String apiLocation, boolean originals) {
         String datasetId = version.getDataset().getId().toString();
         String versionTag = version.getFriendlyVersionNumber();
-        versionTag = versionTag.replace("DRAFT", ":draft");
+        versionTag = versionTag.replace("DRAFT", DS_VERSION_DRAFT);
         if (!"".equals(currentFolder)) {
             subFolder = currentFolder + "/" + subFolder;
         }
@@ -2172,5 +1797,11 @@ public static boolean isActivelyEmbargoed(List<FileMetadata> fmdList) {
         }
         return false;
     }
+
+
+    public static String getStorageDriver(DataFile dataFile) {
+        String storageIdentifier = dataFile.getStorageIdentifier();
+        return storageIdentifier.substring(0, storageIdentifier.indexOf(DataAccess.SEPARATOR));
+    }
     
 }
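
As a rough illustration of the two-limit check introduced in saveInputStreamInTempFile above, here is a minimal standalone sketch (not part of this changeset): the class name is made up, and plain IOExceptions stand in for the project's FileExceedsMaxSizeException / FileExceedsStorageQuotaException and its bundle messages.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    public class TempFileLimitSketch {

        // Copy the stream to a temp file, then enforce an optional per-file size limit
        // and an optional storage-quota limit, mirroring the order of checks added above.
        static Path saveWithLimits(InputStream in, Long fileSizeLimit, Long storageQuotaLimit) throws IOException {
            Path tempFile = Files.createTempFile("tmp", "upload");
            Files.copy(in, tempFile, StandardCopyOption.REPLACE_EXISTING);

            long fileSize = Files.size(tempFile);
            if (fileSizeLimit != null && fileSize > fileSizeLimit) {
                Files.deleteIfExists(tempFile);
                throw new IOException("file size " + fileSize + " exceeds the limit of " + fileSizeLimit + " bytes");
            }
            if (storageQuotaLimit != null && fileSize > storageQuotaLimit) {
                Files.deleteIfExists(tempFile);
                throw new IOException("file size " + fileSize + " exceeds the remaining storage quota of " + storageQuotaLimit + " bytes");
            }
            return tempFile;
        }
    }
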
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/FirstNames.java b/src/main/java/edu/harvard/iq/dataverse/util/FirstNames.java
similarity index 98%
rename from src/main/java/edu/harvard/iq/dataverse/export/openaire/FirstNames.java
rename to src/main/java/edu/harvard/iq/dataverse/util/FirstNames.java
index 6a7bfe400f0..d82aa5e59b2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/FirstNames.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FirstNames.java
@@ -1,4 +1,4 @@
-package edu.harvard.iq.dataverse.export.openaire;
+package edu.harvard.iq.dataverse.util;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -9,10 +9,10 @@
 import java.util.logging.Level;
 
 /**
- *
+ * Used by PersonOrOrgUtil
  * @author francesco.cadili@4science.it
  */
-public class FirstNames {
+class FirstNames {
 
     private static FirstNames instance = null;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java
index 5b87b18573b..b02ac63cacd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/JsfHelper.java
@@ -1,11 +1,9 @@
 package edu.harvard.iq.dataverse.util;
 
-import java.util.Locale;
-import java.util.ResourceBundle;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.faces.application.FacesMessage;
-import javax.faces.context.FacesContext;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.context.FacesContext;
 
 /**
  * Utility class for common JSF tasks.
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
index 72980c3451a..0724e53700b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/MailUtil.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse.util;
 
+import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.UserNotification;
@@ -8,8 +9,8 @@
 import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
 
 public class MailUtil {
 
@@ -39,6 +40,8 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti
                 datasetDisplayName = ((Dataset) objectOfNotification).getDisplayName();
             } else if (objectOfNotification instanceof DatasetVersion) {
                 datasetDisplayName = ((DatasetVersion) objectOfNotification).getDataset().getDisplayName();
+            } else if (objectOfNotification instanceof DataFile) {
+                datasetDisplayName = ((DataFile) objectOfNotification).getOwner().getDisplayName();
             }
         }
 
@@ -50,7 +53,9 @@ public static String getSubjectTextBasedOnNotification(UserNotification userNoti
             case CREATEDV:
                 return BundleUtil.getStringFromBundle("notification.email.create.dataverse.subject", rootDvNameAsList);
             case REQUESTFILEACCESS:
-                return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", rootDvNameAsList);
+                return BundleUtil.getStringFromBundle("notification.email.request.file.access.subject", Arrays.asList(rootDvNameAsList.get(0), datasetDisplayName));
+            case REQUESTEDFILEACCESS:
+                return BundleUtil.getStringFromBundle("notification.email.requested.file.access.subject", Arrays.asList(rootDvNameAsList.get(0), datasetDisplayName));
             case GRANTFILEACCESS:
                 return BundleUtil.getStringFromBundle("notification.email.grant.file.access.subject", rootDvNameAsList);
             case REJECTFILEACCESS:
diff --git a/src/main/java/edu/harvard/iq/dataverse/export/openaire/Organizations.java b/src/main/java/edu/harvard/iq/dataverse/util/Organizations.java
similarity index 97%
rename from src/main/java/edu/harvard/iq/dataverse/export/openaire/Organizations.java
rename to src/main/java/edu/harvard/iq/dataverse/util/Organizations.java
index d08ea723176..475afdb48b8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/export/openaire/Organizations.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/Organizations.java
@@ -1,6 +1,5 @@
-package edu.harvard.iq.dataverse.export.openaire;
+package edu.harvard.iq.dataverse.util;
 
-import edu.harvard.iq.dataverse.util.StringUtil;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
@@ -15,10 +14,10 @@
 import opennlp.tools.util.Span;
 
 /**
- *
+ * Used by PersonOrOrgUtil
  * @author francesco.cadili@4science.it
  */
-public class Organizations {
+class Organizations {
 
     private static Organizations instance = null;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
index da33fc9597e..f68957ad060 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtil.java
@@ -4,14 +4,11 @@
 import java.util.List;
 import java.util.logging.Logger;
 
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-
-import edu.harvard.iq.dataverse.export.openaire.Cleanup;
-import edu.harvard.iq.dataverse.export.openaire.FirstNames;
-import edu.harvard.iq.dataverse.export.openaire.Organizations;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
 
@@ -69,7 +66,7 @@ public class PersonOrOrgUtil {
      * @return
      */
     public static JsonObject getPersonOrOrganization(String name, boolean organizationIfTied, boolean isPerson) {
-        name = Cleanup.normalize(name);
+        name = StringUtil.normalize(name);
 
         String givenName = null;
         String familyName = null;
@@ -121,6 +118,15 @@ public static JsonObject getPersonOrOrganization(String name, boolean organizati
                 }
             }
         }
+        if(!isOrganization && givenName == null && name.contains(",")) {
+            //If we still think this is a person and there's only one comma, assume we can extract the given name and family name
+            if (!name.replaceFirst(",", "").contains(",")) {
+                // contributorName=<FamilyName>, <FirstName>
+                String[] fullName = name.split(", ");
+                givenName = fullName[1];
+                familyName = fullName[0];
+            }
+        }
         JsonObjectBuilder job = new NullSafeJsonBuilder();
         job.add("fullName", name);
         job.add("givenName", givenName);
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java b/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java
index 0221d45e4b0..fdac50ee58a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/RequiredCheckboxValidator.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
 import java.text.MessageFormat;
-import javax.faces.application.FacesMessage;
-import javax.faces.component.UIComponent;
-import javax.faces.component.UIInput;
-import javax.faces.context.FacesContext;
-import javax.faces.validator.Validator;
-import javax.faces.validator.ValidatorException;
+import jakarta.faces.application.FacesMessage;
+import jakarta.faces.component.UIComponent;
+import jakarta.faces.component.UIInput;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.validator.Validator;
+import jakarta.faces.validator.ValidatorException;
 
 /**
  * from http://balusc.blogspot.com/2008/09/validate-required-checkbox.html via
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java
index 0539ea40cb8..effa1980d70 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SessionUtil.java
@@ -4,8 +4,8 @@
 import java.util.HashMap;
 import java.util.Map.Entry;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
 
 public class SessionUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java
new file mode 100644
index 00000000000..1826689b892
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java
@@ -0,0 +1,269 @@
+package edu.harvard.iq.dataverse.util;
+
+/**
+  Eko Indarto, DANS
+  Vic Ding, DANS
+
+  This class prepares the resources used in Signposting.
+
+  Two configurable options allow changing the limit for the number of authors or datafiles (items) allowed in the level-1 header.
+  If more than this number exists, no entries of that type are included in the level-1 header.
+  See the documentation for the dataverse.signposting.level1-author-limit and dataverse.signposting.level1-item-limit settings.
+
+  Also note that, per the Signposting spec, authors for which no PID/URL has been provided are not included in the Signposting output.
+
+ */
+
+import edu.harvard.iq.dataverse.*;
+import edu.harvard.iq.dataverse.dataset.DatasetUtil;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Objects;
+import java.util.logging.Logger;
+
+import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
+
+public class SignpostingResources {
+    private static final Logger logger = Logger.getLogger(SignpostingResources.class.getCanonicalName());
+    SystemConfig systemConfig;
+    DatasetVersion workingDatasetVersion;
+    static final String defaultFileTypeValue = "https://schema.org/Dataset";
+    static final int defaultMaxLinks = 5;
+    int maxAuthors;
+    int maxItems;
+
+    public SignpostingResources(SystemConfig systemConfig, DatasetVersion workingDatasetVersion, String authorLimitSetting, String itemLimitSetting) {
+        this.systemConfig = systemConfig;
+        this.workingDatasetVersion = workingDatasetVersion;
+        maxAuthors = SystemConfig.getIntLimitFromStringOrDefault(authorLimitSetting, defaultMaxLinks);
+        maxItems = SystemConfig.getIntLimitFromStringOrDefault(itemLimitSetting, defaultMaxLinks);
+    }
+
+
+    /**
+     * Get the key/value pairs of the Signposting items and return them as a single string.
+     *
+     * @return comma-delimited string of link entries
+     */
+    public String getLinks() {
+        List<String> valueList = new LinkedList<>();
+        Dataset ds = workingDatasetVersion.getDataset();
+
+        String identifierSchema = getAuthorsAsString(getAuthorURLs(true));
+        if (identifierSchema != null && !identifierSchema.isEmpty()) {
+            valueList.add(identifierSchema);
+        }
+
+        if (!Objects.equals(ds.getPersistentURL(), "")) {
+            String citeAs = "<" + ds.getPersistentURL() + ">;rel=\"cite-as\"";
+            valueList.add(citeAs);
+        }
+
+        List<FileMetadata> fms = workingDatasetVersion.getFileMetadatas();
+        String items = getItems(fms);
+        if (items != null && !Objects.equals(items, "")) {
+            valueList.add(items);
+        }
+
+        String describedby = "<" + ds.getGlobalId().asURL().toString() + ">;rel=\"describedby\"" + ";type=\"" + "application/vnd.citationstyles.csl+json\"";
+        describedby += ",<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId="
+                + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/ld+json\"";
+        valueList.add(describedby);
+
+        String type = "<https://schema.org/AboutPage>;rel=\"type\",<" + defaultFileTypeValue + ">;rel=\"type\"";
+        valueList.add(type);
+
+        String licenseString = "<" + DatasetUtil.getLicenseURI(workingDatasetVersion) + ">;rel=\"license\"";
+        valueList.add(licenseString);
+
+        String linkset = "<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/"
+                + workingDatasetVersion.getVersionNumber() + "." + workingDatasetVersion.getMinorVersionNumber()
+                + "/linkset?persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + "> ; rel=\"linkset\";type=\"application/linkset+json\"";
+        valueList.add(linkset);
+        logger.fine(String.format("valueList is: %s", valueList));
+
+        return String.join(", ", valueList);
+    }
+
+    public JsonArrayBuilder getJsonLinkset() {
+        Dataset ds = workingDatasetVersion.getDataset();
+        GlobalId gid = ds.getGlobalId();
+        String landingPage = systemConfig.getDataverseSiteUrl() + "/dataset.xhtml?persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier();
+        JsonArrayBuilder authors = getJsonAuthors(getAuthorURLs(false));
+        JsonArrayBuilder items = getJsonItems();
+
+        String licenseString = DatasetUtil.getLicenseURI(workingDatasetVersion);
+
+        JsonArrayBuilder mediaTypes = Json.createArrayBuilder();
+        mediaTypes.add(
+                jsonObjectBuilder().add(
+                        "href",
+                        gid.asURL().toString()
+                ).add(
+                        "type",
+                        "application/vnd.citationstyles.csl+json"
+                )
+        );
+
+        mediaTypes.add(
+                jsonObjectBuilder().add(
+                        "href",
+                        systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier()
+                ).add(
+                        "type",
+                        "application/ld+json"
+                )
+        );
+        JsonArrayBuilder linksetJsonObj = Json.createArrayBuilder();
+
+        JsonObjectBuilder mandatory;
+        mandatory = jsonObjectBuilder().add("anchor", landingPage)
+                .add("cite-as", Json.createArrayBuilder().add(jsonObjectBuilder().add("href", ds.getPersistentURL())))
+                .add("type",
+                        Json.createArrayBuilder().add(jsonObjectBuilder().add("href", "https://schema.org/AboutPage"))
+                                .add(jsonObjectBuilder().add("href", defaultFileTypeValue)));
+
+        if (authors != null) {
+            mandatory.add("author", authors);
+        }
+        if (licenseString != null && !licenseString.isBlank()) {
+            mandatory.add("license", jsonObjectBuilder().add("href", licenseString));
+        }
+        if (!mediaTypes.toString().isBlank()) {
+            mandatory.add("describedby", mediaTypes);
+        }
+        if (items != null) {
+            mandatory.add("item", items);
+        }
+        linksetJsonObj.add(mandatory);
+
+        // remove scholarly type as shown already on landing page
+        for (FileMetadata fm : workingDatasetVersion.getFileMetadatas()) {
+            DataFile df = fm.getDataFile();
+            JsonObjectBuilder itemAnchor = jsonObjectBuilder().add("anchor", getPublicDownloadUrl(df));
+            itemAnchor.add("collection", Json.createArrayBuilder().add(jsonObjectBuilder()
+                    .add("href", landingPage)));
+            linksetJsonObj.add(itemAnchor);
+        }
+
+        return linksetJsonObj;
+    }
+
+    /* Method retrieves all the authors of a DatasetVersion with a valid URL and puts them in a list
+     * @param limit - if true, will return an empty list (for level 1) if more than maxAuthors authors with URLs are found
+     */
+    private List<String> getAuthorURLs(boolean limit) {
+        List<String> authorURLs = new ArrayList<String>(maxAuthors);
+        int visibleAuthorCounter = 0;
+
+        for (DatasetAuthor da : workingDatasetVersion.getDatasetAuthors()) {
+            logger.fine(String.format("idtype: %s; idvalue: %s, affiliation: %s; identifierUrl: %s", da.getIdType(),
+                    da.getIdValue(), da.getAffiliation(), da.getIdentifierAsUrl()));
+            String authorURL = "";
+            authorURL = getAuthorUrl(da);
+            if (authorURL != null && !authorURL.isBlank()) {
+                // return empty if the number of visible authors is more than the max allowed
+                // >= since we're comparing before incrementing visibleAuthorCounter
+                if (visibleAuthorCounter >= maxAuthors) {
+                    authorURLs.clear();
+                    break;
+                }
+                authorURLs.add(authorURL);
+                visibleAuthorCounter++;
+                
+
+            }
+        }
+        return authorURLs;
+    }
+
+
+    /**
+     * Get Authors as string
+     * For example:
+     * if author has VIAF
+     * Link: <http://viaf.org/viaf/:id/>; rel="author"
+     *
+     * @param datasetAuthorURLs list of all DatasetAuthors with a valid URL
+     * @return all the author links in a string
+     */
+    private String getAuthorsAsString(List<String> datasetAuthorURLs) {
+        String singleAuthorString;
+        String identifierSchema = null;
+        for (String authorURL : datasetAuthorURLs) {
+            singleAuthorString = "<" + authorURL + ">;rel=\"author\"";
+            if (identifierSchema == null) {
+                identifierSchema = singleAuthorString;
+            } else {
+                identifierSchema = String.join(",", identifierSchema, singleAuthorString);
+            }
+        }
+        logger.fine(String.format("identifierSchema: %s", identifierSchema));
+        return identifierSchema;
+    }
+
+    /* 
+     * 
+     */
+    private String getAuthorUrl(DatasetAuthor da) {
+        String authorURL = "";
+        //If no type and there's a value, assume it is a URL (is this reasonable?)
+        //Otherwise, get the URL using the type and value
+        if (da.getIdType() != null && !da.getIdType().isBlank() && da.getIdValue()!=null) {
+            authorURL = da.getIdValue();
+        } else {
+            authorURL = da.getIdentifierAsUrl();
+        }
+        return authorURL;
+    }
+
+    private JsonArrayBuilder getJsonAuthors(List<String> datasetAuthorURLs) {
+        if(datasetAuthorURLs.isEmpty()) {
+            return null;
+        }
+        JsonArrayBuilder authors = Json.createArrayBuilder();
+        for (String authorURL : datasetAuthorURLs) {
+                authors.add(jsonObjectBuilder().add("href", authorURL));
+        }
+        return authors;
+    }
+
+    private String getItems(List<FileMetadata> fms) {
+        if (fms.size() > maxItems) {
+            logger.fine(String.format("maxItem is %s and fms size is %s", maxItems, fms.size()));
+            return null;
+        }
+
+        String itemString = null;
+        for (FileMetadata fm : fms) {
+            DataFile df = fm.getDataFile();
+            if (itemString == null) {
+                itemString = "<" + getPublicDownloadUrl(df) + ">;rel=\"item\";type=\"" + df.getContentType() + "\"";
+            } else {
+                itemString = String.join(",", itemString, "<" + getPublicDownloadUrl(df) + ">;rel=\"item\";type=\"" + df.getContentType() + "\"");
+            }
+        }
+        return itemString;
+    }
+
+    private JsonArrayBuilder getJsonItems() {
+        JsonArrayBuilder items = Json.createArrayBuilder();
+        for (FileMetadata fm : workingDatasetVersion.getFileMetadatas()) {
+            DataFile df = fm.getDataFile();
+            items.add(jsonObjectBuilder().add("href", getPublicDownloadUrl(df)).add("type", df.getContentType()));
+        }
+
+        return items;
+    }
+    
+    private String getPublicDownloadUrl(DataFile dataFile) {
+        GlobalId gid = dataFile.getGlobalId();
+        return FileUtil.getPublicDownloadUrl(systemConfig.getDataverseSiteUrl(),
+                ((gid != null) ? gid.asString() : null), dataFile.getId());
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
index 750358b12d7..33c87563104 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/StringUtil.java
@@ -20,6 +20,8 @@
 import javax.crypto.IllegalBlockSizeException;
 import javax.crypto.NoSuchPaddingException;
 import javax.crypto.spec.SecretKeySpec;
+
+import org.apache.commons.lang3.StringUtils;
 import org.jsoup.Jsoup;
 
 /**
@@ -198,4 +200,23 @@ private static SecretKeySpec generateKeyFromString(final String secKey) throws U
         SecretKeySpec secretKeySpec = new SecretKeySpec(key, "AES");
         return secretKeySpec;
     }
+    
+    /**
+     * Normalize a sentence: trim it, collapse runs of spaces, and ensure a single
+     * space after each comma.
+     *
+     * @author francesco.cadili@4science.it
+     *
+     * @param sentence full name or organization name
+     * @return the normalized string (empty if the input is blank or null)
+     */
+    static public String normalize(String sentence) {
+        if (StringUtils.isBlank(sentence)) {
+            return "";
+        }
+
+        sentence = sentence.trim().replaceAll(", *", ", ").replaceAll(" +", " ");
+
+        return sentence;
+    }
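+
+    // Illustrative behavior (hypothetical inputs): normalize("  Doe,John   Q. ") returns "Doe, John Q.";
+    // normalize(null) and normalize("   ") both return "".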
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
index c989add6e3d..3c6992f8ec3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java
@@ -2,6 +2,7 @@
 
 import com.ocpsoft.pretty.PrettyContext;
 import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DataverseServiceBean;
 import edu.harvard.iq.dataverse.DvObjectContainer;
 import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
@@ -12,15 +13,15 @@
 import edu.harvard.iq.dataverse.validation.PasswordValidatorUtil;
 import org.passay.CharacterRule;
 
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonString;
-import javax.json.JsonValue;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
 import java.io.StringReader;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
@@ -892,7 +893,7 @@ public String toString() {
         }
 
     }
-
+    
     public boolean isPublicInstall(){
         boolean saneDefault = false;
         return settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, saneDefault);
@@ -940,18 +941,13 @@ public boolean isHTTPDownload() {
     }
 
     public boolean isGlobusDownload() {
-        return getMethodAvailable(FileUploadMethods.GLOBUS.toString(), false);
+        return getMethodAvailable(FileDownloadMethods.GLOBUS.toString(), false);
     }
     
     public boolean isGlobusFileDownload() {
         return (isGlobusDownload() && settingsService.isTrueForKey(SettingsServiceBean.Key.GlobusSingleFileTransfer, false));
     }
 
-    public List<String> getGlobusStoresList() {
-    String globusStores = settingsService.getValueForKey(SettingsServiceBean.Key.GlobusStores, "");
-    return Arrays.asList(globusStores.split("\\s*,\\s*"));
-    }
-
     private Boolean getMethodAvailable(String method, boolean upload) {
         String methods = settingsService.getValueForKey(
                 upload ? SettingsServiceBean.Key.UploadMethods : SettingsServiceBean.Key.DownloadMethods);
@@ -995,9 +991,29 @@ public boolean isAllowCustomTerms() {
         return settingsService.isTrueForKey(SettingsServiceBean.Key.AllowCustomTermsOfUse, safeDefaultIfKeyNotFound);
     }
 
-    public boolean isFilePIDsEnabled() {
-        boolean safeDefaultIfKeyNotFound = true;
-        return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, safeDefaultIfKeyNotFound);
+    public boolean isFilePIDsEnabledForCollection(Dataverse collection) {
+        if (collection == null) {
+            return false;
+        }
+        
+        Dataverse thisCollection = collection; 
+        
+        // If neither enabled nor disabled specifically for this collection,
+        // the parent collection setting is inherited (recursively):
+        while (thisCollection.getFilePIDsEnabled() == null) {
+            if (thisCollection.getOwner() == null) {
+                // We've reached the root collection, and file PIDs registration
+                // hasn't been explicitly enabled, therefore we presume that it is
+                // subject to how the registration is configured for the 
+                // entire instance:
+                return settingsService.isTrueForKey(SettingsServiceBean.Key.FilePIDsEnabled, false); 
+            }
+            thisCollection = thisCollection.getOwner();
+        }
+        
+        // If present, the setting of the nearest ancestor collection 
+        // takes precedence:
+        return thisCollection.getFilePIDsEnabled();
     }
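+
+    // Illustration for isFilePIDsEnabledForCollection (hypothetical hierarchy): if no collection in
+    // the ancestry of A sets filePIDsEnabled, A falls back to the instance-wide :FilePIDsEnabled
+    // setting (false by default); a child B with filePIDsEnabled=true returns true regardless.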
     
     public boolean isIndependentHandleService() {
@@ -1024,11 +1040,6 @@ public boolean isDatafileValidationOnPublishEnabled() {
 	public boolean directUploadEnabled(DvObjectContainer container) {
     	return Boolean.getBoolean("dataverse.files." + container.getEffectiveStorageDriverId() + ".upload-redirect");
 	}
-	
-	public String getDataCiteRestApiUrlString() {
-		//As of 5.0 the 'doi.dataciterestapiurlstring' is the documented jvm option. Prior versions used 'doi.mdcbaseurlstring' or were hardcoded to api.datacite.org, so the defaults are for backward compatibility.
-        return System.getProperty("doi.dataciterestapiurlstring", System.getProperty("doi.mdcbaseurlstring", "https://api.datacite.org"));
-	}
         
     public boolean isExternalDataverseValidationEnabled() {
         return settingsService.getValueForKey(SettingsServiceBean.Key.DataverseMetadataValidatorScript) != null;
@@ -1102,9 +1113,8 @@ public Map<String, String[]> getCurationLabels() {
         Map<String, String[]> labelMap = new HashMap<String, String[]>();
         String setting = settingsService.getValueForKey(SettingsServiceBean.Key.AllowedCurationLabels, "");
         if (!setting.isEmpty()) {
-            try {
-                JsonReader jsonReader = Json.createReader(new StringReader(setting));
-
+            try (JsonReader jsonReader = Json.createReader(new StringReader(setting))){
+                
                 Pattern pattern = Pattern.compile("(^[\\w ]+$)"); // alphanumeric, underscore and whitespace allowed
 
                 JsonObject labelSets = jsonReader.readObject();
@@ -1149,4 +1159,18 @@ public boolean isSignupDisabledForRemoteAuthProvider(String providerId) {
         
         return !ret; 
     }
+    
+    public boolean isStorageQuotasEnforced() {
+        return settingsService.isTrueForKey(SettingsServiceBean.Key.UseStorageQuotas, false);
+    }
+    
+    /**
+     * This method should only be used, temporarily, for testing the new storage
+     * quota mechanism: it applies the same quota value (defined as a database
+     * setting) to *everybody*, regardless of the circumstances.
+     */
+    public Long getTestStorageQuotaLimit() {
+        return settingsService.getValueForKeyAsLong(SettingsServiceBean.Key.StorageQuotaSizeInBytes);
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java
index 4acf2d544e8..a3293e0cd28 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java
@@ -5,14 +5,22 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import javax.json.Json;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT;
 
 public class URLTokenUtil {
 
@@ -22,6 +30,13 @@ public class URLTokenUtil {
     protected final FileMetadata fileMetadata;
     protected ApiToken apiToken;
     protected String localeCode;
+    
+    
+    public static final String HTTP_METHOD="httpMethod";
+    public static final String TIMEOUT="timeOut";
+    public static final String SIGNED_URL="signedUrl";
+    public static final String NAME="name";
+    public static final String URL_TEMPLATE="urlTemplate";
 
     /**
      * File level
@@ -177,8 +192,7 @@ private String getTokenValue(String value) {
                 }
             }
             if (("DRAFT").equals(versionString)) {
-                versionString = ":draft"; // send the token needed in api calls that can be substituted for a numeric
-                                          // version.
+                versionString = DS_VERSION_DRAFT; // send the token needed in api calls that can be substituted for a numeric version.
             }
             return versionString;
         case FILE_METADATA_ID:
@@ -193,6 +207,58 @@ private String getTokenValue(String value) {
         throw new IllegalArgumentException("Cannot replace reserved word: " + value);
     }
     
+    public JsonObjectBuilder createPostBody(JsonObject params, JsonArray allowedApiCalls) {
+        JsonObjectBuilder bodyBuilder = Json.createObjectBuilder();
+        bodyBuilder.add("queryParameters", params);
+        if (allowedApiCalls != null && !allowedApiCalls.isEmpty()) {
+            JsonArrayBuilder apisBuilder = Json.createArrayBuilder();
+            allowedApiCalls.getValuesAs(JsonObject.class).forEach(((apiObj) -> {
+                logger.fine(JsonUtil.prettyPrint(apiObj));
+                String name = apiObj.getJsonString(NAME).getString();
+                String httpmethod = apiObj.getJsonString(HTTP_METHOD).getString();
+                int timeout = apiObj.getInt(TIMEOUT);
+                String urlTemplate = apiObj.getJsonString(URL_TEMPLATE).getString();
+                logger.fine("URL Template: " + urlTemplate);
+                urlTemplate = SystemConfig.getDataverseSiteUrlStatic() + urlTemplate;
+                String apiPath = replaceTokensWithValues(urlTemplate);
+                logger.fine("URL WithTokens: " + apiPath);
+                String url = apiPath;
+                // Sign if apiToken exists, otherwise send unsigned URL (i.e. for guest users)
+                ApiToken apiToken = getApiToken();
+                if (apiToken != null) {
+                    url = UrlSignerUtil.signUrl(apiPath, timeout, apiToken.getAuthenticatedUser().getUserIdentifier(),
+                            httpmethod, JvmSettings.API_SIGNING_SECRET.lookupOptional().orElse("")
+                                    + getApiToken().getTokenString());
+                }
+                logger.fine("Signed URL: " + url);
+                apisBuilder.add(Json.createObjectBuilder().add(NAME, name).add(HTTP_METHOD, httpmethod)
+                        .add(SIGNED_URL, url).add(TIMEOUT, timeout));
+            }));
+            bodyBuilder.add("signedUrls", apisBuilder);
+        }
+        return bodyBuilder;
+    }
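+
+    // Sketch of the body built above (all names and values below are illustrative, not actual output):
+    //   { "queryParameters": { "datasetPid": "doi:10.5072/FK2/EXAMPLE" },
+    //     "signedUrls": [ { "name": "retrieveDatasetJson", "httpMethod": "GET",
+    //                       "signedUrl": "https://demo.example/api/...?...&token=...", "timeOut": 10 } ] }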
+
+    public JsonObject getParams(JsonObject toolParameters) {
+        //ToDo - why an array of objects, each with a single key/value pair, instead of one object?
+        JsonArray queryParams = toolParameters.getJsonArray("queryParameters");
+    
+        // ToDo return json and print later
+        JsonObjectBuilder paramsBuilder = Json.createObjectBuilder();
+        if (queryParams != null && !queryParams.isEmpty()) {
+            queryParams.getValuesAs(JsonObject.class).forEach((queryParam) -> {
+                queryParam.keySet().forEach((key) -> {
+                    String value = queryParam.getString(key);
+                    JsonValue param = getParam(value);
+                    if (param != null) {
+                        paramsBuilder.add(key, param);
+                    }
+                });
+            });
+        }
+        return paramsBuilder.build();
+    }
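+
+    // Illustrative mapping (hypothetical tool manifest): a toolParameters value of
+    //   {"queryParameters":[{"datasetPid":"{datasetPid}"}]}
+    // would yield {"datasetPid":"doi:10.5072/FK2/EXAMPLE"} once the reserved word is resolved.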
+
     public static String getScriptForUrl(String url) {
         String msg = BundleUtil.getStringFromBundle("externaltools.enable.browser.popups");
         String script = "const newWin = window.open('" + url + "', target='_blank'); if (!newWin || newWin.closed || typeof newWin.closed == \"undefined\") {alert(\"" + msg + "\");}";
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java
index c2d9bf67236..acbdc6aa3c6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/WebloaderUtil.java
@@ -1,22 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import java.util.Date;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map.Entry;
 import java.util.logging.Logger;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpSession;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpSession;
 
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetPage;
-import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
-import edu.harvard.iq.dataverse.authorization.users.User;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
 public class WebloaderUtil {
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
index 920f80901be..b7c44014b80 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/BagGenerator.java
@@ -73,8 +73,10 @@
 
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.DataFile.ChecksumType;
-import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.json.JsonLDTerm;
+import java.util.Optional;
 
 public class BagGenerator {
 
@@ -110,7 +112,7 @@ public class BagGenerator {
 
     private String apiKey = null;
 
-    private javax.json.JsonObject oremapObject;
+    private jakarta.json.JsonObject oremapObject;
     private JsonObject aggregation;
 
     private String dataciteXml;
@@ -208,7 +210,7 @@ public boolean generateBag(OutputStream outputStream) throws Exception {
         aggregation = (JsonObject) new JsonParser().parse(oremapObject.getJsonObject(JsonLDTerm.ore("describes").getLabel()).toString());
 
         String pidUrlString = aggregation.get("@id").getAsString();
-        String pidString=GlobalId.getInternalFormOfPID(pidUrlString);
+        String pidString=PidUtil.parseAsGlobalID(pidUrlString).asString();
         bagID = pidString + "v."
                 + aggregation.get(JsonLDTerm.schemaOrg("version").getLabel()).getAsString();
         
@@ -822,17 +824,20 @@ private String generateInfoFile() {
             logger.warning("No contact info available for BagIt Info file");
         }
 
-        info.append("Source-Organization: " + BundleUtil.getStringFromBundle("bagit.sourceOrganization"));
+        String orgName = JvmSettings.BAGIT_SOURCE_ORG_NAME.lookupOptional(String.class).orElse("Dataverse Installation (<Site Url>)");
+        String orgAddress = JvmSettings.BAGIT_SOURCEORG_ADDRESS.lookupOptional(String.class).orElse("<Full address>");
+        String orgEmail = JvmSettings.BAGIT_SOURCEORG_EMAIL.lookupOptional(String.class).orElse("<Email address>");
+
+        info.append("Source-Organization: " + orgName);
         // ToDo - make configurable
         info.append(CRLF);
 
-        info.append("Organization-Address: " + WordUtils.wrap(
-                BundleUtil.getStringFromBundle("bagit.sourceOrganizationAddress"), 78, CRLF + " ", true));
+        info.append("Organization-Address: " + WordUtils.wrap(orgAddress, 78, CRLF + " ", true));
+
         info.append(CRLF);
 
         // Not a BagIt standard name
-        info.append(
-                "Organization-Email: " + BundleUtil.getStringFromBundle("bagit.sourceOrganizationEmail"));
+        info.append("Organization-Email: " + orgEmail);
         info.append(CRLF);
 
         info.append("External-Description: ");
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
index a6d85e1addb..aa653a6e360 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMap.java
@@ -8,8 +8,10 @@
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObjectContainer;
+import edu.harvard.iq.dataverse.Embargo;
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
@@ -29,22 +31,40 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
 
+/**
+ * This class is used to generate a JSON-LD representation of a Dataverse object leveraging the OAI_ORE and other community vocabularies. As of v1.0.0,
+ * the format is being versioned and ANY CHANGES TO THE OUTPUT of this class must be reflected in a version increment (see DATAVERSE_ORE_FORMAT_VERSION).
+ * 
+ * The OREMap class is intended to record ALL the information needed to recreate an existing Dataverse dataset. As of v1.0.0, this is true with the 
+ * exception that auxiliary files are not referenced in the OREMap. While many types of auxiliary files will be regenerated automatically based on datafile
+ *  contents, Dataverse now allows manually uploaded auxiliary files and these cannot be reproduced solely from the dataset/datafile contents. 
+ */
 public class OREMap {
 
+    //Required Services
     static SettingsServiceBean settingsService;
     static DatasetFieldServiceBean datasetFieldService;
+    static SystemConfig systemConfig;
+    
     private static final Logger logger = Logger.getLogger(OREMap.class.getCanonicalName());
     
     public static final String NAME = "OREMap";
+    
+    //NOTE: Update this value whenever the output of this class is changed
+    private static final String DATAVERSE_ORE_FORMAT_VERSION = "Dataverse OREMap Format v1.0.0";
+    private static final String DATAVERSE_SOFTWARE_NAME = "Dataverse";
+    private static final String DATAVERSE_SOFTWARE_URL = "https://github.com/iqss/dataverse";
+    
+    
     private Map<String, String> localContext = new TreeMap<String, String>();
     private DatasetVersion version;
     private Boolean excludeEmail = null;
@@ -64,15 +84,15 @@ public void writeOREMap(OutputStream outputStream) throws Exception {
         outputStream.flush();
     }
 
-    public JsonObject getOREMap() throws Exception {
+    public JsonObject getOREMap() {
         return getOREMap(false);
     }
     
-    public JsonObject getOREMap(boolean aggregationOnly) throws Exception {
+    public JsonObject getOREMap(boolean aggregationOnly) {
         return getOREMapBuilder(aggregationOnly).build();
     }
     
-    public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Exception {
+    public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) {
 
         //Set this flag if it wasn't provided
         if(excludeEmail==null) {
@@ -87,13 +107,13 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
         localContext.putIfAbsent(JsonLDNamespace.schema.getPrefix(), JsonLDNamespace.schema.getUrl());
 
         Dataset dataset = version.getDataset();
-        String id = dataset.getGlobalId().toURL().toExternalForm();
+        String id = dataset.getGlobalId().asURL();
         JsonArrayBuilder fileArray = Json.createArrayBuilder();
         // The map describes an aggregation
         JsonObjectBuilder aggBuilder = Json.createObjectBuilder();
         List<DatasetField> fields = version.getDatasetFields();
         // That has it's own metadata
-        Map<Long, JsonObject> cvocMap = datasetFieldService.getCVocConf(false);
+        Map<Long, JsonObject> cvocMap = datasetFieldService.getCVocConf(true);
         for (DatasetField field : fields) {
             if (!field.isEmpty()) {
                 DatasetFieldType dfType = field.getDatasetFieldType();
@@ -113,6 +133,18 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
                 .add(JsonLDTerm.schemaOrg("name").getLabel(), version.getTitle())
                 .add(JsonLDTerm.schemaOrg("dateModified").getLabel(), version.getLastUpdateTime().toString());
         addIfNotNull(aggBuilder, JsonLDTerm.schemaOrg("datePublished"), dataset.getPublicationDateFormattedYYYYMMDD());
+        //Add version state info (DRAFT, RELEASED, DEACCESSIONED, ARCHIVED), with extra info for DEACCESSIONED
+        VersionState vs = version.getVersionState();
+        if(vs.equals(VersionState.DEACCESSIONED)) {
+            JsonObjectBuilder deaccBuilder = Json.createObjectBuilder();
+            deaccBuilder.add(JsonLDTerm.schemaOrg("name").getLabel(), vs.name());
+            deaccBuilder.add(JsonLDTerm.DVCore("reason").getLabel(), version.getVersionNote());
+            addIfNotNull(deaccBuilder, JsonLDTerm.DVCore("forwardUrl"), version.getArchiveNote());
+            aggBuilder.add(JsonLDTerm.schemaOrg("creativeWorkStatus").getLabel(), deaccBuilder);
+            
+        } else {
+            aggBuilder.add(JsonLDTerm.schemaOrg("creativeWorkStatus").getLabel(), vs.name());
+        }
 
         TermsOfUseAndAccess terms = version.getTermsOfUseAndAccess();
         if (terms.getLicense() != null) {
@@ -193,6 +225,17 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
                 }
                 addIfNotNull(aggRes, JsonLDTerm.schemaOrg("name"), fileName); 
                 addIfNotNull(aggRes, JsonLDTerm.restricted, fmd.isRestricted());
+                Embargo embargo=df.getEmbargo(); 
+                if(embargo!=null) {
+                    String date = embargo.getFormattedDateAvailable();
+                    String reason= embargo.getReason();
+                    JsonObjectBuilder embargoObject = Json.createObjectBuilder();
+                    embargoObject.add(JsonLDTerm.DVCore("dateAvailable").getLabel(), date);
+                    if(reason!=null) {
+                        embargoObject.add(JsonLDTerm.DVCore("reason").getLabel(), reason);
+                    }
+                    aggRes.add(JsonLDTerm.DVCore("embargoed").getLabel(), embargoObject);
+                }
                 addIfNotNull(aggRes, JsonLDTerm.directoryLabel, fmd.getDirectoryLabel());
                 addIfNotNull(aggRes, JsonLDTerm.schemaOrg("version"), fmd.getVersion());
                 addIfNotNull(aggRes, JsonLDTerm.datasetVersionId, fmd.getDatasetVersion().getId());
@@ -211,7 +254,7 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
                 // File DOI if it exists
                 String fileId = null;
                 String fileSameAs = null;
-                if (df.getGlobalId().asString().length() != 0) {
+                if (df.getGlobalId()!=null) {
                     fileId = df.getGlobalId().asString();
                     fileSameAs = SystemConfig.getDataverseSiteUrlStatic()
                             + "/api/access/datafile/:persistentId?persistentId=" + fileId + (ingested ? "&format=original":"");
@@ -257,10 +300,23 @@ public JsonObjectBuilder getOREMapBuilder(boolean aggregationOnly) throws Except
             return aggBuilder.add("@context", contextBuilder.build());
         } else {
             // Now create the overall map object with it's metadata
+            
+            //Start with a reference to the Dataverse software
+            JsonObjectBuilder dvSoftwareBuilder = Json.createObjectBuilder()
+                    .add("@type", JsonLDTerm.schemaOrg("SoftwareApplication").getLabel())
+                    .add(JsonLDTerm.schemaOrg("name").getLabel(), DATAVERSE_SOFTWARE_NAME)
+                    .add(JsonLDTerm.schemaOrg("version").getLabel(), systemConfig.getVersion(true))
+                    .add(JsonLDTerm.schemaOrg("url").getLabel(), DATAVERSE_SOFTWARE_URL);
+            
+            //Now the OREMAP object itself
             JsonObjectBuilder oremapBuilder = Json.createObjectBuilder()
                     .add(JsonLDTerm.dcTerms("modified").getLabel(), LocalDate.now().toString())
                     .add(JsonLDTerm.dcTerms("creator").getLabel(), BrandingUtil.getInstallationBrandName())
                     .add("@type", JsonLDTerm.ore("ResourceMap").getLabel())
+                    //Add the version of our ORE format used
+                    .add(JsonLDTerm.schemaOrg("additionalType").getLabel(), DATAVERSE_ORE_FORMAT_VERSION)
+                    //Indicate which Dataverse version created it
+                    .add(JsonLDTerm.DVCore("generatedBy").getLabel(), dvSoftwareBuilder)
                     // Define an id for the map itself (separate from the @id of the dataset being
                     // described
                     .add("@id",
@@ -375,23 +431,7 @@ public static JsonValue getJsonLDForField(DatasetField field, Boolean excludeEma
         if (!dfType.isCompound()) {
             for (String val : field.getValues_nondisplay()) {
                 if (cvocMap.containsKey(dfType.getId())) {
-                    try {
-                        JsonObject cvocEntry = cvocMap.get(dfType.getId());
-                        if (cvocEntry.containsKey("retrieval-filtering")) {
-                            JsonObject filtering = cvocEntry.getJsonObject("retrieval-filtering");
-                            JsonObject context = filtering.getJsonObject("@context");
-                            for (String prefix : context.keySet()) {
-                                localContext.putIfAbsent(prefix, context.getString(prefix));
-                            }
-                            vals.add(datasetFieldService.getExternalVocabularyValue(val));
-                        } else {
-                            vals.add(val);
-                        }
-                    } catch (Exception e) {
-                        logger.warning("Couldn't interpret value for : " + val + " : " + e.getMessage());
-                        logger.log(Level.FINE, ExceptionUtils.getStackTrace(e));
-                        vals.add(val);
-                    }
+                    addCvocValue(val, vals, cvocMap.get(dfType.getId()), localContext);
                 } else {
                     vals.add(val);
                 }
@@ -420,15 +460,22 @@ public static JsonValue getJsonLDForField(DatasetField field, Boolean excludeEma
                         }
 
                         List<String> values = dsf.getValues_nondisplay();
-                        if (values.size() > 1) {
-                            JsonArrayBuilder childVals = Json.createArrayBuilder();
 
-                            for (String val : dsf.getValues_nondisplay()) {
+                        JsonArrayBuilder childVals = Json.createArrayBuilder();
+
+                        for (String val : dsf.getValues_nondisplay()) {
+                            logger.fine("Child name: " + dsft.getName());
+                            if (cvocMap.containsKey(dsft.getId())) {
+                                logger.fine("Calling addcvocval for: " + dsft.getName());
+                                addCvocValue(val, childVals, cvocMap.get(dsft.getId()), localContext);
+                            } else {
                                 childVals.add(val);
                             }
+                        }
+                        if (values.size() > 1) {
                             child.add(subFieldName.getLabel(), childVals);
                         } else {
-                            child.add(subFieldName.getLabel(), values.get(0));
+                            child.add(subFieldName.getLabel(), childVals.build().get(0));
                         }
                     }
                 }
@@ -440,8 +487,34 @@ public static JsonValue getJsonLDForField(DatasetField field, Boolean excludeEma
         return (valArray.size() != 1) ? valArray : valArray.get(0);
     }
 
-    public static void injectSettingsService(SettingsServiceBean settingsSvc, DatasetFieldServiceBean datasetFieldSvc) {
+    private static void addCvocValue(String val, JsonArrayBuilder vals, JsonObject cvocEntry,
+            Map<String, String> localContext) {
+        try {
+            if (cvocEntry.containsKey("retrieval-filtering")) {
+                JsonObject filtering = cvocEntry.getJsonObject("retrieval-filtering");
+                JsonObject context = filtering.getJsonObject("@context");
+                for (String prefix : context.keySet()) {
+                    localContext.putIfAbsent(prefix, context.getString(prefix));
+                }
+                JsonObjectBuilder job = Json.createObjectBuilder(datasetFieldService.getExternalVocabularyValue(val));
+                job.add("@id", val);
+                JsonObject extVal = job.build();
+                logger.fine("Adding: " + extVal);
+                vals.add(extVal);
+            } else {
+                vals.add(val);
+            }
+        } catch (Exception e) {
+            logger.warning("Couldn't interpret value for : " + val + " : " + e.getMessage());
+            logger.log(Level.FINE, ExceptionUtils.getStackTrace(e));
+            vals.add(val);
+        }
+    }
+
+    //These are used to pick up various settings/constants from the application
+    public static void injectServices(SettingsServiceBean settingsSvc, DatasetFieldServiceBean datasetFieldSvc, SystemConfig systemCfg) {
         settingsService = settingsSvc;
         datasetFieldService = datasetFieldSvc;
+        systemConfig = systemCfg;
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java
index 6cd7f0928dc..cca1e16b4f8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/OREMapHelper.java
@@ -2,11 +2,11 @@
 
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 /**
  * This is a small helper bean 
@@ -22,8 +22,11 @@ public class OREMapHelper {
     @EJB
     DatasetFieldServiceBean datasetFieldSvc;
     
+    @EJB
+    SystemConfig systemConfig;
+    
     @PostConstruct
     public void injectService() {
-        OREMap.injectSettingsService(settingsSvc, datasetFieldSvc);
+        OREMap.injectServices(settingsSvc, datasetFieldSvc, systemConfig);
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java
index 2bcac04076a..ecb34bdcfb5 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/bagit/data/FileUtilWrapper.java
@@ -3,6 +3,7 @@
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
+import edu.harvard.iq.dataverse.util.file.FileExceedsStorageQuotaException;
 import edu.harvard.iq.dataverse.util.FileUtil;
 
 import java.io.File;
@@ -43,7 +44,11 @@ public void deleteFile(Path filePath) {
     }
 
     public File saveInputStreamInTempFile(InputStream inputStream, Long fileSizeLimit) throws IOException, FileExceedsMaxSizeException {
-        return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit);
+        try {
+            return FileUtil.saveInputStreamInTempFile(inputStream, fileSizeLimit);
+        } catch (FileExceedsStorageQuotaException fesqx) {
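+            // The quota exception is swallowed here; callers of this wrapper simply see a null temp file.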
+            return null; 
+        } 
     }
 
     public String determineFileType(File file, String fileName) throws IOException {
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java b/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java
index 53c80037223..4b0263030dc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactory.java
@@ -7,10 +7,10 @@
 import edu.harvard.iq.dataverse.util.bagit.data.FileDataProviderFactory;
 import edu.harvard.iq.dataverse.util.bagit.data.FileUtilWrapper;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.enterprise.context.SessionScoped;
-import javax.inject.Named;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.enterprise.context.SessionScoped;
+import jakarta.inject.Named;
 import java.io.Serializable;
 import java.util.Optional;
 import java.util.logging.Logger;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java
new file mode 100644
index 00000000000..29eeca254f7
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/util/file/FileExceedsStorageQuotaException.java
@@ -0,0 +1,22 @@
+/*
+ * To change this license header, choose License Headers in Project Properties.
+ * To change this template file, choose Tools | Templates
+ * and open the template in the editor.
+ */
+package edu.harvard.iq.dataverse.util.file;
+
+/**
+ *
+ * @author landreev
+ */
+public class FileExceedsStorageQuotaException extends Exception {
+
+    public FileExceedsStorageQuotaException(String message) {
+        super(message);
+    }
+
+    public FileExceedsStorageQuotaException(String message, Throwable cause) {
+        super(message, cause);
+    }
+    
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java
index ee0a882a10d..3fcaf6b11ff 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinter.java
@@ -1,11 +1,10 @@
 package edu.harvard.iq.dataverse.util.json;
 
 import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 import edu.harvard.iq.dataverse.workflow.Workflow;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.JsonObjectBuilder;
 
 /**
  * A Json printer that prints minimal data on objects. Useful when embedding 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
index 127632bf711..4fb3ffe6c14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JSONLDUtil.java
@@ -18,17 +18,17 @@
 import java.util.logging.Logger;
 
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonWriter;
-import javax.json.JsonWriterFactory;
-import javax.json.JsonValue.ValueType;
-import javax.json.stream.JsonGenerator;
-import javax.ws.rs.BadRequestException;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonWriter;
+import jakarta.json.JsonWriterFactory;
+import jakarta.json.JsonValue.ValueType;
+import jakarta.json.stream.JsonGenerator;
+import jakarta.ws.rs.BadRequestException;
 
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.Dataset;
@@ -39,6 +39,7 @@
 import edu.harvard.iq.dataverse.DatasetFieldValue;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
 import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.MetadataBlockServiceBean;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
@@ -51,6 +52,7 @@
 import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
+import jakarta.json.JsonReader;
 
 public class JSONLDUtil {
 
@@ -81,7 +83,7 @@ public static Dataset updateDatasetMDFromJsonLD(Dataset ds, String jsonLDBody,
 
         JsonObject jsonld = decontextualizeJsonLD(jsonLDBody);
         if (migrating) {
-            Optional<GlobalId> maybePid = GlobalId.parse(jsonld.getString("@id"));
+            Optional<GlobalId> maybePid = GlobalIdServiceBean.parse(jsonld.getString("@id"));
             if (maybePid.isPresent()) {
                 ds.setGlobalId(maybePid.get());
             } else {
@@ -239,6 +241,7 @@ else if (key.equals("https://dataverse.org/schema/core#fileRequestAccess")) {
             }
         }
         dsv.setTermsOfUseAndAccess(terms);
+        terms.setDatasetVersion(dsv);
         dsv.setDatasetFields(dsfl);
 
         return dsv;
@@ -348,7 +351,9 @@ public static DatasetVersion deleteDatasetVersionMDFromJsonLD(DatasetVersion dsv
      * @return null if exact match, otherwise return a field without the value to be deleted
      */
     private static DatasetField getReplacementField(DatasetField dsf, JsonArray valArray) {
-        // TODO Auto-generated method stub
+        // TODO Parse valArray and remove any matching entries in the dsf.
+        // Until then, a delete removes all values of a multi-valued field;
+        // doing this on a required field will fail.
         return null;
     }
 
@@ -529,13 +534,11 @@ public static JsonObject decontextualizeJsonLD(String jsonLDString) {
         try (StringReader rdr = new StringReader(jsonLDString)) {
 
             // Use JsonLd to expand/compact to localContext
-            JsonObject jsonld = Json.createReader(rdr).readObject();
-            JsonDocument doc = JsonDocument.of(jsonld);
-            JsonArray array = null;
-            try {
-                array = JsonLd.expand(doc).get();
-                jsonld = JsonLd.compact(JsonDocument.of(array), JsonDocument.of(Json.createObjectBuilder().build()))
-                        .get();
+            try (JsonReader jsonReader = Json.createReader(rdr)) {
+                JsonObject jsonld = jsonReader.readObject();
+                JsonDocument doc = JsonDocument.of(jsonld);
+                JsonArray array = JsonLd.expand(doc).get();
+                jsonld = JsonLd.compact(JsonDocument.of(array), JsonDocument.of(Json.createObjectBuilder().build())).get();
                 // jsonld = array.getJsonObject(0);
                 logger.fine("Decontextualized object: " + jsonld);
                 return jsonld;
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java
index 065097709cf..3193f762538 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonLDTerm.java
@@ -29,6 +29,9 @@ public class JsonLDTerm {
     public static JsonLDTerm studyCompletion = JsonLDTerm.DVCore("studyCompletion");
 
     public static JsonLDTerm restricted = JsonLDTerm.DVCore("restricted");
+    public static JsonLDTerm embargoed = JsonLDTerm.DVCore("embargoed");
+    public static JsonLDTerm embargoDateAvailable = JsonLDTerm.DVCore("dateAvailable");
+    public static JsonLDTerm embargoReason = JsonLDTerm.DVCore("reason");
     public static JsonLDTerm directoryLabel = JsonLDTerm.DVCore("directoryLabel");
     public static JsonLDTerm datasetVersionId = JsonLDTerm.DVCore("datasetVersionId");
     public static JsonLDTerm categories = JsonLDTerm.DVCore("categories");
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
index 22e2c6c8d78..984c607aac7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java
@@ -34,7 +34,6 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
 import org.apache.commons.validator.routines.DomainValidator;
 
-import java.io.StringReader;
 import java.sql.Timestamp;
 import java.text.ParseException;
 import java.util.ArrayList;
@@ -50,13 +49,12 @@
 import java.util.Set;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.json.JsonString;
-import javax.json.JsonValue;
-import javax.json.JsonValue.ValueType;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
+import jakarta.json.JsonValue;
+import jakarta.json.JsonValue.ValueType;
 
 /**
  * Parses JSON objects into domain objects.
@@ -71,6 +69,7 @@ public class JsonParser {
     MetadataBlockServiceBean blockService;
     SettingsServiceBean settingsService;
     LicenseServiceBean licenseService;
+    HarvestingClient harvestingClient = null; 
     
     /**
      * if lenient, we will accept alternate spellings for controlled vocabulary values
@@ -85,10 +84,15 @@ public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceB
     }
 
     public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService) {
+        this(datasetFieldSvc, blockService, settingsService, licenseService, null);
+    }
+    
+    public JsonParser(DatasetFieldServiceBean datasetFieldSvc, MetadataBlockServiceBean blockService, SettingsServiceBean settingsService, LicenseServiceBean licenseService, HarvestingClient harvestingClient) {
         this.datasetFieldSvc = datasetFieldSvc;
         this.blockService = blockService;
         this.settingsService = settingsService;
         this.licenseService = licenseService;
+        this.harvestingClient = harvestingClient;
     }
 
     public JsonParser() {
@@ -147,6 +151,10 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException {
                 }
             }
         }
+        
+        if (jobj.containsKey("filePIDsEnabled")) {
+            dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled"));
+        }
 
         /*  We decided that subject is not user set, but gotten from the subject of the dataverse's
             datasets - leavig this code in for now, in case we need to go back to it at some point
@@ -363,7 +371,28 @@ public DatasetVersion parseDatasetVersion(JsonObject obj, DatasetVersion dsv) th
             dsv.setUNF(obj.getString("UNF", null));
             // Terms of Use related fields
             TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
-            License license = parseLicense(obj.getString("license", null));
+
+            License license = null; 
+            
+            try {
+                // This method will attempt to parse the license in the format 
+                // in which it appears in our json exports, as a compound
+                // field, for ex.:
+                // "license": {
+                //    "name": "CC0 1.0",
+                //    "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+                // }
+                license = parseLicense(obj.getJsonObject("license"));
+            } catch (ClassCastException cce) {
+                logger.fine("class cast exception parsing the license section (will try parsing as a string)");
+                // attempt to parse as string: 
+                // i.e. this is for backward compatibility, after the bug in #9155
+                // was fixed, with the old style of encoding the license info 
+                // in input json, for ex.: 
+                // "license" : "CC0 1.0"
+                license = parseLicense(obj.getString("license", null));
+            }
+            
             if (license == null) {
                 terms.setLicense(license);
                 terms.setTermsOfUse(obj.getString("termsOfUse", null));
@@ -421,6 +450,48 @@ private edu.harvard.iq.dataverse.license.License parseLicense(String licenseName
         if (license == null) throw new JsonParseException("Invalid license: " + licenseNameOrUri);
         return license;
     }
+    
+    private edu.harvard.iq.dataverse.license.License parseLicense(JsonObject licenseObj) throws JsonParseException {
+        if (licenseObj == null){
+            boolean safeDefaultIfKeyNotFound = true;
+            if (settingsService.isTrueForKey(SettingsServiceBean.Key.AllowCustomTermsOfUse, safeDefaultIfKeyNotFound)){
+                return null;
+            } else {
+                return licenseService.getDefault();
+            }
+        }
+        
+        String licenseName = licenseObj.getString("name", null);
+        String licenseUri = licenseObj.getString("uri", null);
+        
+        License license = null; 
+        
+        // If a uri is provided, we try that first, since it is the easier lookup:
+        // the uri is always the same. The name, on the other hand, may have been
+        // customized (translated) on this instance, so if we are processing exported
+        // json we may be dealing with such a translated name. Unlike the uri, it
+        // cannot simply be checked against the name in the License database table.
+        if (licenseUri != null) {
+            license = licenseService.getByNameOrUri(licenseUri);
+        }
+        
+        if (license != null) {
+            return license;
+        }
+        
+        if (licenseName == null) {
+            String exMsg = "Invalid or unsupported license section submitted"
+                    + (licenseUri != null ? ": " + licenseUri : ".");
+            throw new JsonParseException(exMsg); 
+        }
+        
+        license = licenseService.getByPotentiallyLocalizedName(licenseName);
+        if (license == null) {
+            throw new JsonParseException("Invalid or unsupported license: " + licenseName);
+        }
+        return license;
+    }
 
     public List<DatasetField> parseMetadataBlocks(JsonObject json) throws JsonParseException {
         Set<String> keys = json.keySet();
@@ -522,7 +593,29 @@ public DataFile parseDataFile(JsonObject datafileJson) {
         if (contentType == null) {
             contentType = "application/octet-stream";
         }
-        String storageIdentifier = datafileJson.getString("storageIdentifier", " ");
+        String storageIdentifier = null;
+        /**
+         * When harvesting from other Dataverses using this json format, we 
+         * don't want to import their storage identifiers verbatim. Instead, we 
+         * rewrite them to point to the access API location on the remote
+         * archive side.
+         */
+        if (harvestingClient != null && datafileJson.containsKey("id")) {
+            String remoteId = datafileJson.getJsonNumber("id").toString();
+            storageIdentifier = harvestingClient.getArchiveUrl()
+                    + "/api/access/datafile/"
+                    + remoteId;
+            /**
+             * Note that we don't have any practical use for these urls as of
+             * now. In the past we performed some tasks on harvested content
+             * that involved trying to access the files. In any event, it makes
+             * more sense to collect these urls than to import the storage
+             * identifiers as-is, since those become completely meaningless on
+             * the local system.
+             */
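+            // e.g. (hypothetical values): remote file id 12345 harvested from https://archive.example
+            // becomes "https://archive.example/api/access/datafile/12345"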
+        } else {
+            storageIdentifier = datafileJson.getString("storageIdentifier", null);
+        }
         JsonObject checksum = datafileJson.getJsonObject("checksum");
         if (checksum != null) {
             // newer style that allows for SHA-1 rather than MD5
@@ -587,8 +680,7 @@ private DatasetField remapGeographicCoverage(CompoundVocabularyException ex) thr
         // convert DTO to datasetField so we can back valid values.
         Gson gson = new Gson();
         String jsonString = gson.toJson(geoCoverageDTO);
-        JsonReader jsonReader = Json.createReader(new StringReader(jsonString));
-        JsonObject obj = jsonReader.readObject();
+        JsonObject obj = JsonUtil.getJsonObject(jsonString);
         DatasetField geoCoverageField = parseField(obj);
 
         // add back valid values
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index 9f5401f77d1..cfc266f2ba7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -1,23 +1,6 @@
 package edu.harvard.iq.dataverse.util.json;
 
 import edu.harvard.iq.dataverse.*;
-import edu.harvard.iq.dataverse.AuxiliaryFile;
-import edu.harvard.iq.dataverse.ControlledVocabularyValue;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataFileTag;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetDistributor;
-import edu.harvard.iq.dataverse.DatasetFieldType;
-import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
-import edu.harvard.iq.dataverse.DatasetFieldValue;
-import edu.harvard.iq.dataverse.DatasetLock;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
-import edu.harvard.iq.dataverse.DataverseContact;
-import edu.harvard.iq.dataverse.DataverseFacet;
-import edu.harvard.iq.dataverse.DataverseTheme;
-import edu.harvard.iq.dataverse.api.Datasets;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.groups.impl.maildomain.MailDomainGroup;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
@@ -35,11 +18,19 @@
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.dataaccess.DataAccess;
 import edu.harvard.iq.dataverse.dataset.DatasetUtil;
+import edu.harvard.iq.dataverse.datavariable.CategoryMetadata;
+import edu.harvard.iq.dataverse.datavariable.DataVariable;
+import edu.harvard.iq.dataverse.datavariable.SummaryStatistic;
+import edu.harvard.iq.dataverse.datavariable.VarGroup;
+import edu.harvard.iq.dataverse.datavariable.VariableCategory;
+import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
+import edu.harvard.iq.dataverse.datavariable.VariableRange;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.globus.FileDetailsHolder;
 import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.DatasetFieldWalker;
 import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder;
 
@@ -47,9 +38,10 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepData;
 
 import java.util.*;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObjectBuilder;
+
 import java.util.function.BiConsumer;
 import java.util.function.BinaryOperator;
 import java.util.function.Function;
@@ -59,10 +51,10 @@
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.toList;
 
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
 
 /**
  * Convert objects to Json.
@@ -294,6 +286,9 @@ public static JsonObjectBuilder json(Dataverse dv, Boolean hideEmail) {
         if(dv.getStorageDriverId() != null) {
         	bld.add("storageDriverLabel", DataAccess.getStorageDriverLabelFor(dv.getStorageDriverId()));
         }
+        if (dv.getFilePIDsEnabled() != null) {
+            bld.add("filePIDsEnabled", dv.getFilePIDsEnabled());
+        }
 
         return bld;
     }
@@ -356,24 +351,34 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) {
                 .add("mime",ds.getMime()));
     }
 
-    public static JsonObjectBuilder json(DatasetVersion dsv) {
+    public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) {
+        return json(dsv, null, includeFiles);
+    }
+
+    public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList, boolean includeFiles) {
+    /*    return json(dsv, null, includeFiles, null);
+    }
+    public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList, boolean includeFiles, Long numberOfFiles) {*/
+        Dataset dataset = dsv.getDataset();
         JsonObjectBuilder bld = jsonObjectBuilder()
-                .add("id", dsv.getId()).add("datasetId", dsv.getDataset().getId())
-                .add("datasetPersistentId", dsv.getDataset().getGlobalId().asString())
-                .add("storageIdentifier", dsv.getDataset().getStorageIdentifier())
+                .add("id", dsv.getId()).add("datasetId", dataset.getId())
+                .add("datasetPersistentId", dataset.getGlobalId().asString())
+                .add("storageIdentifier", dataset.getStorageIdentifier())
                 .add("versionNumber", dsv.getVersionNumber()).add("versionMinorNumber", dsv.getMinorVersionNumber())
                 .add("versionState", dsv.getVersionState().name()).add("versionNote", dsv.getVersionNote())
                 .add("archiveNote", dsv.getArchiveNote()).add("deaccessionLink", dsv.getDeaccessionLink())
                 .add("distributionDate", dsv.getDistributionDate()).add("productionDate", dsv.getProductionDate())
                 .add("UNF", dsv.getUNF()).add("archiveTime", format(dsv.getArchiveTime()))
                 .add("lastUpdateTime", format(dsv.getLastUpdateTime())).add("releaseTime", format(dsv.getReleaseTime()))
-                .add("createTime", format(dsv.getCreateTime()));
-        License license = DatasetUtil.getLicense(dsv);;
+                .add("createTime", format(dsv.getCreateTime()))
+                .add("alternativePersistentId", dataset.getAlternativePersistentIdentifier())
+                .add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD())
+                .add("citationDate", dataset.getCitationDateFormattedYYYYMMDD());
+                //.add("numberOfFiles", numberOfFiles);
+        
+        License license = DatasetUtil.getLicense(dsv);
         if (license != null) {
-            // Standard license
-            bld.add("license", jsonObjectBuilder()
-                    .add("name", DatasetUtil.getLicenseName(dsv))
-                    .add("uri", DatasetUtil.getLicenseURI(dsv)));
+            bld.add("license", jsonLicense(dsv));
         } else {
             // Custom terms
             bld.add("termsOfUse", dsv.getTermsOfUseAndAccess().getTermsOfUse())
@@ -394,14 +399,17 @@ public static JsonObjectBuilder json(DatasetVersion dsv) {
                 .add("studyCompletion", dsv.getTermsOfUseAndAccess().getStudyCompletion())
                 .add("fileAccessRequest", dsv.getTermsOfUseAndAccess().isFileAccessRequest());
 
-        bld.add("metadataBlocks", jsonByBlocks(dsv.getDatasetFields()));
-
-        bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas()));
+        bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ?
+                jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList)
+                : jsonByBlocks(dsv.getDatasetFields())
+        );
+        if (includeFiles) {
+            bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas()));
+        }
 
         return bld;
     }
-    
-    
+
     public static JsonObjectBuilder jsonDataFileList(List<DataFile> dataFiles){
     
         if (dataFiles==null){
@@ -429,8 +437,8 @@ public static JsonObjectBuilder jsonDataFileList(List<DataFile> dataFiles){
      * to the regular `json` method for DatasetVersion? Will anything break?
      * Unit tests for that method could not be found.
      */
-    public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv) {
-        JsonObjectBuilder dsvWithCitation = JsonPrinter.json(dsv);
+    public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv, boolean includeFiles) {
+        JsonObjectBuilder dsvWithCitation = JsonPrinter.json(dsv, includeFiles);
         dsvWithCitation.add("citation", dsv.getCitation());
         return dsvWithCitation;
     }
@@ -449,7 +457,7 @@ public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv) {
      */
     public static JsonObjectBuilder jsonAsDatasetDto(DatasetVersion dsv) {
         JsonObjectBuilder datasetDtoAsJson = JsonPrinter.json(dsv.getDataset());
-        datasetDtoAsJson.add("datasetVersion", jsonWithCitation(dsv));
+        datasetDtoAsJson.add("datasetVersion", jsonWithCitation(dsv, true));
         return datasetDtoAsJson;
     }
 
@@ -474,11 +482,15 @@ public static JsonObjectBuilder json(DatasetDistributor dist) {
     }
 
     public static JsonObjectBuilder jsonByBlocks(List<DatasetField> fields) {
+        return jsonByBlocks(fields, null);
+    }
+
+    public static JsonObjectBuilder jsonByBlocks(List<DatasetField> fields, List<String> anonymizedFieldTypeNamesList) {
         JsonObjectBuilder blocksBld = jsonObjectBuilder();
 
         for (Map.Entry<MetadataBlock, List<DatasetField>> blockAndFields : DatasetField.groupByBlock(fields).entrySet()) {
             MetadataBlock block = blockAndFields.getKey();
-            blocksBld.add(block.getName(), JsonPrinter.json(block, blockAndFields.getValue()));
+            blocksBld.add(block.getName(), JsonPrinter.json(block, blockAndFields.getValue(), anonymizedFieldTypeNamesList));
         }
         return blocksBld;
     }
@@ -492,14 +504,18 @@ public static JsonObjectBuilder jsonByBlocks(List<DatasetField> fields) {
      * @return JSON Object builder with the block and fields information.
      */
     public static JsonObjectBuilder json(MetadataBlock block, List<DatasetField> fields) {
+        return json(block, fields, null);
+    }
+
+    public static JsonObjectBuilder json(MetadataBlock block, List<DatasetField> fields, List<String> anonymizedFieldTypeNamesList) {
         JsonObjectBuilder blockBld = jsonObjectBuilder();
 
         blockBld.add("displayName", block.getDisplayName());
         blockBld.add("name", block.getName());
         
         final JsonArrayBuilder fieldsArray = Json.createArrayBuilder();
-        Map<Long, JsonObject> cvocMap = (datasetFieldService==null) ? new HashMap<Long, JsonObject>() :datasetFieldService.getCVocConf(false);
-        DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray));
+        Map<Long, JsonObject> cvocMap = (datasetFieldService==null) ? new HashMap<Long, JsonObject>() :datasetFieldService.getCVocConf(true);
+        DatasetFieldWalker.walk(fields, settingsService, cvocMap, new DatasetFieldsToJson(fieldsArray, anonymizedFieldTypeNamesList));
 
         blockBld.add("fields", fieldsArray);
         return blockBld;
@@ -520,7 +536,7 @@ public static JsonObject json(DatasetField dfv) {
             return null;
         } else {
             JsonArrayBuilder fieldArray = Json.createArrayBuilder();
-            Map<Long, JsonObject> cvocMap = (datasetFieldService==null) ? new HashMap<Long, JsonObject>() :datasetFieldService.getCVocConf(false);
+            Map<Long, JsonObject> cvocMap = (datasetFieldService==null) ? new HashMap<Long, JsonObject>() :datasetFieldService.getCVocConf(true);
             DatasetFieldWalker.walk(dfv, new DatasetFieldsToJson(fieldArray), cvocMap);
             JsonArray out = fieldArray.build();
             return out.getJsonObject(0);
@@ -553,6 +569,7 @@ public static JsonObjectBuilder json(DatasetFieldType fld) {
         fieldsBld.add("description", fld.getDescription());
         fieldsBld.add("multiple", fld.isAllowMultiples());
         fieldsBld.add("isControlledVocabulary", fld.isControlledVocabulary());
+        fieldsBld.add("displayFormat", fld.getDisplayFormat());
         if (fld.isControlledVocabulary()) {
             // If the field has a controlled vocabulary,
             // add all values to the resulting JSON
@@ -588,7 +605,7 @@ public static JsonObjectBuilder json(FileMetadata fmd) {
                 .add("version", fmd.getVersion())
                 .add("datasetVersionId", fmd.getDatasetVersion().getId())
                 .add("categories", getFileCategories(fmd))
-                .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd));
+                .add("dataFile", JsonPrinter.json(fmd.getDataFile(), fmd, false));
     }
 
       public static JsonObjectBuilder json(AuxiliaryFile auxFile) {
@@ -604,10 +621,10 @@ public static JsonObjectBuilder json(AuxiliaryFile auxFile) {
                 .add("dataFile", JsonPrinter.json(auxFile.getDataFile()));
     }
     public static JsonObjectBuilder json(DataFile df) {
-        return JsonPrinter.json(df, null);
+        return JsonPrinter.json(df, null, false);
     }
     
-    public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
+    public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boolean forExportDataProvider) {
         // File names are no longer stored in the DataFile entity; 
         // (they are instead in the FileMetadata (as "labels") - this way 
         // the filename can change between versions... 
@@ -625,27 +642,25 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
         }
          
         fileName = fileMetadata.getLabel();
-        
-        String pidURL = "";
-        
-        if (new GlobalId(df).toURL() != null){
-            pidURL = new GlobalId(df).toURL().toString();
-        }
+        GlobalId filePid = df.getGlobalId();
+        String pidURL = (filePid!=null)? filePid.asURL(): null;
+        //For backward compatibility - prior to #8674, asString() returned "" for the value when no PID exists.
+        String pidString = (filePid!=null)? filePid.asString(): "";
 
         JsonObjectBuilder embargo = df.getEmbargo() != null ? JsonPrinter.json(df.getEmbargo()) : null;
 
-        return jsonObjectBuilder()
+        NullSafeJsonBuilder builder = jsonObjectBuilder()
                 .add("id", df.getId())
-                .add("persistentId", df.getGlobalIdString())
+                .add("persistentId", pidString)
                 .add("pidURL", pidURL)
                 .add("filename", fileName)
                 .add("contentType", df.getContentType())
+                .add("friendlyType", df.getFriendlyType())
                 .add("filesize", df.getFilesize())
                 .add("description", fileMetadata.getDescription())
                 .add("categories", getFileCategories(fileMetadata))
                 .add("embargo", embargo)
                 //.add("released", df.isReleased())
-                //.add("restricted", df.isRestricted())
                 .add("storageIdentifier", df.getStorageIdentifier())
                 .add("originalFileFormat", df.getOriginalFileFormat())
                 .add("originalFormatLabel", df.getOriginalFormatLabel())
@@ -664,11 +679,178 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata) {
                 //---------------------------------------------
                 .add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue()))
                 .add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue()))
+                .add("tabularData", df.isTabularData())
                 .add("tabularTags", getTabularFileTags(df))
-                .add("creationDate",  df.getCreateDateFormattedYYYYMMDD())
+                .add("creationDate", df.getCreateDateFormattedYYYYMMDD())
+                .add("publicationDate", df.getPublicationDateFormattedYYYYMMDD());
+        Dataset dfOwner = df.getOwner();
+        if (dfOwner != null) {
+            builder.add("fileAccessRequest", dfOwner.isFileAccessRequest());
+        }
+        /*
+         * The restricted state was not included prior to #9175, so to avoid backward
+         * incompatibility it is now only added when generating json for the
+         * InternalExportDataProvider fileDetails.
+         */
+        if (forExportDataProvider) {
+            builder.add("restricted", df.isRestricted())
+            .add("fileMetadataId", fileMetadata.getId())
+            .add("dataTables", df.getDataTables().isEmpty() ? null : JsonPrinter.jsonDT(df.getDataTables()))
+            .add("varGroups", fileMetadata.getVarGroups().isEmpty()
+                    ? JsonPrinter.jsonVarGroup(fileMetadata.getVarGroups())
+                    : null);
+        }
+        return builder;
+    }
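+
+    /*
+     * Illustrative usage of the widened signature above (a sketch, not part of the
+     * change itself; `df` and `fmd` stand for an existing DataFile and its FileMetadata):
+     *
+     *   JsonObjectBuilder apiView    = JsonPrinter.json(df, fmd, false); // backward-compatible view
+     *   JsonObjectBuilder exportView = JsonPrinter.json(df, fmd, true);  // also emits restricted, fileMetadataId,
+     *                                                                    // dataTables and varGroups details
+     */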
+    
+    //Started from https://github.com/RENCI-NRIG/dataverse/, i.e. https://github.com/RENCI-NRIG/dataverse/commit/2b5a1225b42cf1caba85e18abfeb952171c6754a
+    public static JsonArrayBuilder jsonDT(List<DataTable> ldt) {
+        JsonArrayBuilder ldtArr = Json.createArrayBuilder();
+        for(DataTable dt: ldt){
+            ldtArr.add(JsonPrinter.json(dt));
+        }
+        return ldtArr;
+    }
+
+    public static JsonObjectBuilder json(DataTable dt) {
+        return jsonObjectBuilder()
+                .add("varQuantity", dt.getVarQuantity())
+                .add("caseQuantity", dt.getCaseQuantity())
+                .add("recordsPerCase", dt.getRecordsPerCase())
+                .add("UNF", dt.getUnf())
+                .add("dataVariables", JsonPrinter.jsonDV(dt.getDataVariables()))
                 ;
     }
+
+    public static JsonArrayBuilder jsonDV(List<DataVariable> dvl) {
+        JsonArrayBuilder varArr = Json.createArrayBuilder();
+        if(dvl!=null){
+            for (DataVariable dv: dvl){
+                varArr.add(JsonPrinter.json(dv));
+            }
+        }
+        return varArr;
+    }
+
+    // TODO: add sumstat and variable categories, check formats
+    public static JsonObjectBuilder json(DataVariable dv) {
+        return jsonObjectBuilder()
+            .add("id", dv.getId())
+            .add("name", dv.getName())
+            .add("label", dv.getLabel())
+            .add("weighted", dv.isWeighted())
+            .add("variableIntervalType", dv.getIntervalLabel())
+            .add("variableFormatType", dv.getType().name()) // varFormat
+            .add("formatCategory", dv.getFormatCategory())
+            .add("format", dv.getFormat())
+            .add("isOrderedCategorical", dv.isOrderedCategorical()) 
+            .add("fileOrder", dv.getFileOrder()) 
+            .add("UNF",dv.getUnf())
+            .add("fileStartPosition", dv.getFileStartPosition())
+            .add("fileEndPosition", dv.getFileEndPosition())
+            .add("recordSegmentNumber", dv.getRecordSegmentNumber())
+            .add("numberOfDecimalPoints",dv.getNumberOfDecimalPoints())
+            .add("variableMetadata",jsonVarMetadata(dv.getVariableMetadatas()))
+            .add("invalidRanges", dv.getInvalidRanges().isEmpty() ? null : JsonPrinter.jsonInvalidRanges(dv.getInvalidRanges()))
+            .add("summaryStatistics", dv.getSummaryStatistics().isEmpty() ? null : JsonPrinter.jsonSumStat(dv.getSummaryStatistics()))
+            .add("variableCategories", dv.getCategories().isEmpty() ? null : JsonPrinter.jsonCatStat(dv.getCategories()))
+            ;
+    }
+
+    private static JsonArrayBuilder jsonInvalidRanges(Collection<VariableRange> invalidRanges) {
+        JsonArrayBuilder invRanges = Json.createArrayBuilder();
+        for (VariableRange vr : invalidRanges) {
+            // Build a fresh object per range; reusing one builder across iterations would carry values between entries.
+            JsonObjectBuilder job = Json.createObjectBuilder()
+                    .add("beginValue", vr.getBeginValue())
+                    .add("hasBeginValueType", vr.getBeginValueType() != null)
+                    .add("isBeginValueTypePoint", vr.isBeginValueTypePoint())
+                    .add("isBeginValueTypeMin", vr.isBeginValueTypeMin())
+                    .add("isBeginValueTypeMinExcl", vr.isBeginValueTypeMinExcl())
+                    .add("isBeginValueTypeMax", vr.isBeginValueTypeMax())
+                    .add("isBeginValueTypeMaxExcl", vr.isBeginValueTypeMaxExcl())
+                    .add("endValue", vr.getEndValue())
+                    .add("hasEndValueType", vr.getEndValueType() != null)
+                    .add("endValueTypeMax", vr.isEndValueTypeMax())
+                    .add("endValueTypeMaxExcl", vr.isEndValueTypeMaxExcl());
+
+            invRanges.add(job);
+        }
+        return invRanges;
+    }
+
+    private static JsonObjectBuilder jsonSumStat(Collection<SummaryStatistic> sumStat) {
+        JsonObjectBuilder sumStatObj = Json.createObjectBuilder();
+        for (SummaryStatistic stat: sumStat){
+            String label = stat.getTypeLabel()==null ? "unknown":stat.getTypeLabel();
+            sumStatObj.add(label, stat.getValue());
+        }
+        return sumStatObj;
+    }
+
+
+    private static JsonArrayBuilder jsonCatStat(Collection<VariableCategory> catStat){
+        JsonArrayBuilder catArr = Json.createArrayBuilder();
+
+        for (VariableCategory stat: catStat){
+            JsonObjectBuilder catStatObj = Json.createObjectBuilder();
+            catStatObj.add("label", stat.getLabel())
+                      .add("value", stat.getValue())
+                      .add("isMissing", stat.isMissing());
+            if(stat.getFrequency()!=null){
+                catStatObj.add("frequency", stat.getFrequency());
+            }
+            catArr.add(catStatObj);
+        }
+        return catArr;
+    }
+    
+    private static JsonArrayBuilder jsonVarGroup(List<VarGroup> varGroups) {
+        JsonArrayBuilder vgArr = Json.createArrayBuilder();
+        for (VarGroup vg : varGroups) {
+            JsonObjectBuilder vgJson = jsonObjectBuilder().add("id", vg.getId()).add("label", vg.getLabel());
+            JsonArrayBuilder jab = Json.createArrayBuilder();
+            for (DataVariable dvar : vg.getVarsInGroup()) {
+                jab.add(dvar.getId());
+            }
+            vgJson.add("dataVariableIds", jab);
+            vgArr.add(vgJson);
+        }
+        return vgArr;
+    }
+    
+    private static JsonArrayBuilder jsonVarMetadata(Collection<VariableMetadata> varMetadatas) {
+        JsonArrayBuilder vmArr = Json.createArrayBuilder();
+        for (VariableMetadata vm : varMetadatas) {
+            JsonObjectBuilder vmJson = jsonObjectBuilder()
+                    .add("id", vm.getId())
+                    .add("fileMetadataId", vm.getFileMetadata().getId())
+                    .add("label", vm.getLabel())
+                    .add("isWeightVar", vm.isIsweightvar())
+                    .add("isWeighted",vm.isWeighted())
+                    .add("weightVariableId", (vm.getWeightvariable()==null) ? null : vm.getWeightvariable().getId())
+                    .add("literalQuestion", vm.getLiteralquestion())
+                    .add("interviewInstruction", vm.getInterviewinstruction())
+                    .add("postQuestion", vm.getPostquestion())
+                    .add("universe", vm.getUniverse())
+                    .add("notes", vm.getNotes())
+                    .add("categoryMetadatas",json(vm.getCategoriesMetadata()));
+            vmArr.add(vmJson);
+        }
+        return vmArr;
+    }
     
+    private static JsonArrayBuilder json(Collection<CategoryMetadata> categoriesMetadata) {
+        JsonArrayBuilder cmArr = Json.createArrayBuilder();
+        for(CategoryMetadata cm: categoriesMetadata) {
+            JsonObjectBuilder job = jsonObjectBuilder()
+                    .add("wFreq", cm.getWfreq())
+                    .add("categoryValue", cm.getCategory().getValue());
+            cmArr.add(job);
+        }
+        return cmArr;
+    }
+
     public static JsonObjectBuilder json(HarvestingClient harvestingClient) {
         if (harvestingClient == null) {
             return null; 
@@ -736,16 +918,20 @@ private static class DatasetFieldsToJson implements DatasetFieldWalker.Listener
 
         Deque<JsonObjectBuilder> objectStack = new LinkedList<>();
         Deque<JsonArrayBuilder> valueArrStack = new LinkedList<>();
-        JsonObjectBuilder result = null;
-
+        List<String> anonymizedFieldTypeNamesList = null;
         DatasetFieldsToJson(JsonArrayBuilder result) {
             valueArrStack.push(result);
         }
 
+        DatasetFieldsToJson(JsonArrayBuilder result, List<String> anonymizedFieldTypeNamesList) {
+            this(result);
+            this.anonymizedFieldTypeNamesList = anonymizedFieldTypeNamesList;
+        }
+
         @Override
         public void startField(DatasetField f) {
             objectStack.push(jsonObjectBuilder());
-            // Invariant: all values are multiple. Diffrentiation between multiple and single is done at endField.
+            // Invariant: all values are multiple. Differentiation between multiple and single is done at endField.
             valueArrStack.push(Json.createArrayBuilder());
 
             DatasetFieldType typ = f.getDatasetFieldType();
@@ -756,7 +942,7 @@ public void startField(DatasetField f) {
 
         @Override
         public void addExpandedValuesArray(DatasetField f) {
-            // Invariant: all values are multiple. Diffrentiation between multiple and single is done at endField.
+            // Invariant: all values are multiple. Differentiation between multiple and single is done at endField.
             valueArrStack.push(Json.createArrayBuilder());
         }
 
@@ -766,15 +952,19 @@ public void endField(DatasetField f) {
             JsonArray expandedValues = valueArrStack.pop().build();
             JsonArray jsonValues = valueArrStack.pop().build();
             if (!jsonValues.isEmpty()) {
-                jsonField.add("value",
-                        f.getDatasetFieldType().isAllowMultiples() ? jsonValues
-                                : jsonValues.get(0));
-                if (!expandedValues.isEmpty()) {
-                    jsonField.add("expandedvalue",
-                            f.getDatasetFieldType().isAllowMultiples() ? expandedValues
-                                    : expandedValues.get(0));
+                String datasetFieldName = f.getDatasetFieldType().getName();
+                if (anonymizedFieldTypeNamesList != null && anonymizedFieldTypeNamesList.contains(datasetFieldName)) {
+                    anonymizeField(jsonField);
+                } else {
+                    jsonField.add("value",
+                            f.getDatasetFieldType().isAllowMultiples() ? jsonValues
+                                    : jsonValues.get(0));
+                    if (!expandedValues.isEmpty()) {
+                        jsonField.add("expandedvalue",
+                                f.getDatasetFieldType().isAllowMultiples() ? expandedValues
+                                        : expandedValues.get(0));
+                    }
                 }
-
                 valueArrStack.peek().add(jsonField);
             }
         }
@@ -819,6 +1009,12 @@ public void endCompoundValue(DatasetFieldCompoundValue dsfcv) {
                 valueArrStack.peek().add(jsonField);
             }
         }
+
+        private void anonymizeField(JsonObjectBuilder jsonField) {
+            jsonField.add("typeClass", "primitive");
+            jsonField.add("value", BundleUtil.getStringFromBundle("dataset.anonymized.withheld"));
+            jsonField.add("multiple", false);
+        }
     }
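+
+    /*
+     * Sketch of how the anonymization support above can be exercised (the field
+     * names below are hypothetical examples, not a list defined by this class):
+     *
+     *   List<String> withheld = List.of("author", "datasetContact");
+     *   JsonObjectBuilder anonymized = JsonPrinter.json(dsv, withheld, true);
+     *   // fields whose type name is in the list are emitted as a single primitive
+     *   // value taken from the "dataset.anonymized.withheld" bundle key
+     */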
 
     public static JsonObjectBuilder json(AuthenticationProviderRow aRow) {
@@ -912,6 +1108,30 @@ public Set<Collector.Characteristics> characteristics() {
         };
     }
 
+    public static JsonObjectBuilder json(Map<String, Long> map) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        for (Map.Entry<String, Long> mapEntry : map.entrySet()) {
+            jsonObjectBuilder.add(mapEntry.getKey(), mapEntry.getValue());
+        }
+        return jsonObjectBuilder;
+    }
+
+    public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map<FileSearchCriteria.FileAccessStatus, Long> map) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        for (Map.Entry<FileSearchCriteria.FileAccessStatus, Long> mapEntry : map.entrySet()) {
+            jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue());
+        }
+        return jsonObjectBuilder;
+    }
+
+    public static JsonObjectBuilder jsonFileCountPerTabularTagNameMap(Map<DataFileTag.TagType, Long> map) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        for (Map.Entry<DataFileTag.TagType, Long> mapEntry : map.entrySet()) {
+            jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue());
+        }
+        return jsonObjectBuilder;
+    }
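+
+    /*
+     * Example (hypothetical counts, and the enum constant names are assumptions)
+     * of what the map helpers above produce:
+     *
+     *   jsonFileCountPerAccessStatusMap(Map.of(
+     *           FileSearchCriteria.FileAccessStatus.Public, 5L,
+     *           FileSearchCriteria.FileAccessStatus.Restricted, 2L))
+     *   // builds {"Public": 5, "Restricted": 2}
+     */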
+
     public static Collector<JsonObjectBuilder, ArrayList<JsonObjectBuilder>, JsonArrayBuilder> toJsonArray() {
         return new Collector<JsonObjectBuilder, ArrayList<JsonObjectBuilder>, JsonArrayBuilder>() {
 
@@ -979,4 +1199,34 @@ public static JsonObjectBuilder mapToObject(Map<String,String> in) {
         in.keySet().forEach( k->b.add(k, in.get(k)) );
         return b;
     }
+
+
+    /**
+     * Get signposting from Dataset
+     * @param ds the designated Dataset
+     * @return json linkset
+     */
+    public static JsonObjectBuilder jsonLinkset(Dataset ds) {
+        return jsonObjectBuilder()
+                .add("anchor", ds.getPersistentURL())
+                .add("cite-as", Json.createArrayBuilder().add(jsonObjectBuilder().add("href", ds.getPersistentURL())))
+                .add("type", Json.createArrayBuilder().add(jsonObjectBuilder().add("href", "https://schema.org/AboutPage")))
+                .add("author", ds.getPersistentURL())
+                .add("protocol", ds.getProtocol())
+                .add("authority", ds.getAuthority())
+                .add("publisher", BrandingUtil.getInstallationBrandName())
+                .add("publicationDate", ds.getPublicationDateFormattedYYYYMMDD())
+                .add("storageIdentifier", ds.getStorageIdentifier());
+    }
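+
+    /*
+     * Rough shape of the linkset object built above (all values hypothetical):
+     *
+     *   { "anchor": "<persistent URL>", "cite-as": [{"href": "<persistent URL>"}],
+     *     "type": [{"href": "https://schema.org/AboutPage"}], "author": "<persistent URL>",
+     *     "protocol": "doi", "authority": "10.1234", "publisher": "<installation brand>",
+     *     "publicationDate": "2023-01-01", "storageIdentifier": "s3://..." }
+     */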
+
+    private static JsonObjectBuilder jsonLicense(DatasetVersion dsv) {
+        JsonObjectBuilder licenseJsonObjectBuilder = jsonObjectBuilder()
+                .add("name", DatasetUtil.getLicenseName(dsv))
+                .add("uri", DatasetUtil.getLicenseURI(dsv));
+        String licenseIconUri = DatasetUtil.getLicenseIcon(dsv);
+        if (licenseIconUri != null) {
+            licenseJsonObjectBuilder.add("iconUri", licenseIconUri);
+        }
+        return licenseJsonObjectBuilder;
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
index 1c7dce24680..55f9ecb5ce8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinterHelper.java
@@ -3,10 +3,10 @@
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
-import javax.annotation.PostConstruct;
-import javax.ejb.EJB;
-import javax.ejb.Singleton;
-import javax.ejb.Startup;
+import jakarta.annotation.PostConstruct;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Singleton;
+import jakarta.ejb.Startup;
 
 /**
  * This is a small helper bean 
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java
index d02099eddb5..72a1cd2e1eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonUtil.java
@@ -1,35 +1,40 @@
 package edu.harvard.iq.dataverse.util.json;
 
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonObject;
-
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonWriter;
-import javax.json.JsonWriterFactory;
-import javax.json.stream.JsonGenerator;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonException;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonWriter;
+import jakarta.json.JsonWriterFactory;
+import jakarta.json.stream.JsonGenerator;
 
 public class JsonUtil {
 
     private static final Logger logger = Logger.getLogger(JsonUtil.class.getCanonicalName());
 
+    private JsonUtil() {}
+
     /**
      * Make an attempt at pretty printing a String but will return the original
      * string if it isn't JSON or if there is any exception.
      */
     public static String prettyPrint(String jsonString) {
         try {
-            com.google.gson.JsonParser jsonParser = new com.google.gson.JsonParser();
-            JsonObject jsonObject = jsonParser.parse(jsonString).getAsJsonObject();
-            Gson gson = new GsonBuilder().setPrettyPrinting().create();
-            String prettyJson = gson.toJson(jsonObject);
-            return prettyJson;
+            if (jsonString.trim().startsWith("{")) {
+                return prettyPrint(getJsonObject(jsonString));
+            } else {
+                return prettyPrint(getJsonArray(jsonString));
+            }
         } catch (Exception ex) {
             logger.info("Returning original string due to exception: " + ex);
             return jsonString;
@@ -47,7 +52,7 @@ public static String prettyPrint(JsonArray jsonArray) {
         return stringWriter.toString();
     }
 
-    public static String prettyPrint(javax.json.JsonObject jsonObject) {
+    public static String prettyPrint(JsonObject jsonObject) {
         Map<String, Boolean> config = new HashMap<>();
         config.put(JsonGenerator.PRETTY_PRINTING, true);
         JsonWriterFactory jsonWriterFactory = Json.createWriterFactory(config);
@@ -57,16 +62,73 @@ public static String prettyPrint(javax.json.JsonObject jsonObject) {
         }
         return stringWriter.toString();
     }
-    
-    public static javax.json.JsonObject getJsonObject(String serializedJson) {
+
+    /**
+     * Return the contents of the string as a JSON object.
+     * This method closes its resources when an exception occurs, but does
+     * not catch any exceptions.
+     * @param serializedJson the JSON object serialized as a {@code String}
+     * @throws JsonException when parsing fails.
+     * @see #getJsonObject(InputStream)
+     * @see #getJsonObjectFromFile(String)
+     * @see #getJsonArray(String)
+     */
+    public static JsonObject getJsonObject(String serializedJson) {
         try (StringReader rdr = new StringReader(serializedJson)) {
-            return Json.createReader(rdr).readObject();
+            try (JsonReader jsonReader = Json.createReader(rdr)) {
+                return jsonReader.readObject();
+            }
+        }
+    }
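+
+    /*
+     * Example usage (a sketch; the JSON literal is arbitrary):
+     *
+     *   JsonObject obj = JsonUtil.getJsonObject("{\"state\":\"OK\"}");
+     *   String pretty  = JsonUtil.prettyPrint(obj);
+     *
+     * A JsonException propagates to the caller if the string is not valid JSON.
+     */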
+
+    /**
+     * Return the contents of the {@link InputStream} as a JSON object.
+     *
+     * This method closes its resources when an exception occurs, but does
+     * not catch any exceptions.
+     * The caller of this method is responsible for closing the provided stream.
+     * @param stream the input stream to read from
+     * @throws JsonException when parsing fails.
+     * @see #getJsonObject(String)
+     * @see #getJsonObjectFromFile(String)
+     */
+    public static JsonObject getJsonObject(InputStream stream) {
+        try (JsonReader jsonReader = Json.createReader(stream)) {
+            return jsonReader.readObject();
         }
     }
-    
-    public static javax.json.JsonArray getJsonArray(String serializedJson) {
+
+    /**
+     * Return the contents of the file as a JSON object.
+     * This method closes its resources when an exception occurs, but does
+     * not catch any exceptions.
+     * @param fileName the name of the file to read from
+     * @throws FileNotFoundException when the file cannot be opened for reading
+     * @throws JsonException when parsing fails.
+     * @see #getJsonObject(String)
+     * @see #getJsonObject(InputStream)
+     */
+    public static JsonObject getJsonObjectFromFile(String fileName) throws IOException {
+        try (FileReader rdr = new FileReader(fileName)) {
+            try (JsonReader jsonReader = Json.createReader(rdr)) {
+                return jsonReader.readObject();
+            }
+        }
+    }
+
+    /**
+     * Return the contents of the string as a JSON array.
+     * This method closes its resources when an exception occurs, but does
+     * not catch any exceptions.
+     * @param serializedJson the JSON array serialized as a {@code String}
+     * @throws JsonException when parsing fails.
+     * @see #getJsonObject(String)
+     */
+    public static JsonArray getJsonArray(String serializedJson) {
         try (StringReader rdr = new StringReader(serializedJson)) {
-            return Json.createReader(rdr).readArray();
+            try (JsonReader jsonReader = Json.createReader(rdr)) {
+                return jsonReader.readArray();
+            }
         }
     }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
index 59a23a43452..ef8ab39122f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java
@@ -5,11 +5,11 @@
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.sql.Timestamp;
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 /**
  * A JSON builder that drops any null values. If we didn't drop'em,
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java
index 5050aad5bf7..624e49623f2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/EMailValidator.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.validation;
 
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 
 import org.apache.commons.validator.routines.EmailValidator;
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java
index c32e6728358..41e7f1b8b22 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/PasswordValidatorServiceBean.java
@@ -18,9 +18,9 @@
 import java.util.logging.Logger;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.inject.Named;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.inject.Named;
 import org.passay.CharacterCharacteristicsRule;
 import org.passay.CharacterRule;
 import org.passay.DictionaryRule;
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java
index 846ae48783a..285f34d3f8c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/URLValidator.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.validation;
-import edu.harvard.iq.dataverse.util.BundleUtil;
-import javax.validation.ConstraintValidator;
-import javax.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintValidator;
+import jakarta.validation.ConstraintValidatorContext;
 import org.apache.commons.validator.routines.UrlValidator;
 
 /**
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java
index 310dc950858..6ec677bd7a8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateEmail.java
@@ -11,8 +11,8 @@
 import java.lang.annotation.Retention;
 import java.lang.annotation.Target;
 
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 /**
  *
  * @author skraffmi
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java
index 5aaab0c2e8e..3834b119598 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateURL.java
@@ -6,8 +6,8 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
 
 @Target({FIELD})
 @Retention(RUNTIME)
diff --git a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java
index 0583b70df49..6307edd073a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java
+++ b/src/main/java/edu/harvard/iq/dataverse/validation/ValidateUserName.java
@@ -10,11 +10,11 @@
 import java.lang.annotation.Retention;
 import static java.lang.annotation.RetentionPolicy.RUNTIME;
 import java.lang.annotation.Target;
-import javax.validation.Constraint;
-import javax.validation.Payload;
-import javax.validation.constraints.NotBlank;
-import javax.validation.constraints.Size;
-import javax.validation.constraints.Pattern;
+import jakarta.validation.Constraint;
+import jakarta.validation.Payload;
+import jakarta.validation.constraints.NotBlank;
+import jakarta.validation.constraints.Size;
+import jakarta.validation.constraints.Pattern;
 
 /**
  *
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
index 577e0f756de..94fefa9bc13 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/PendingWorkflowInvocation.java
@@ -9,14 +9,14 @@
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.Map;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToOne;
+import jakarta.persistence.ElementCollection;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToOne;
 
 /**
  * A workflow whose current step waits for an external system to complete a
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
index 6c73ed0e64b..bd32c517bc6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/Workflow.java
@@ -6,16 +6,16 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import javax.persistence.CascadeType;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.NamedQueries;
-import javax.persistence.NamedQuery;
-import javax.persistence.OneToMany;
-import javax.persistence.OrderColumn;
+import jakarta.persistence.CascadeType;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.NamedQueries;
+import jakarta.persistence.NamedQuery;
+import jakarta.persistence.OneToMany;
+import jakarta.persistence.OrderColumn;
 
 /**
  * A list of steps that can be executed with a given context. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
index d57b7072be7..47f24c9b8bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/WorkflowServiceBean.java
@@ -6,7 +6,6 @@
 import edu.harvard.iq.dataverse.EjbDataverseEngine;
 import edu.harvard.iq.dataverse.RoleAssigneeServiceBean;
 import edu.harvard.iq.dataverse.UserNotification;
-import edu.harvard.iq.dataverse.UserNotification.Type;
 import edu.harvard.iq.dataverse.UserNotificationServiceBean;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
@@ -33,15 +32,15 @@
 import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.ejb.Asynchronous;
-import javax.ejb.EJB;
-import javax.ejb.Stateless;
-import javax.ejb.TransactionAttribute;
-import javax.ejb.TransactionAttributeType;
-import javax.inject.Inject;
-import javax.persistence.EntityManager;
-import javax.persistence.PersistenceContext;
-import javax.persistence.TypedQuery;
+import jakarta.ejb.Asynchronous;
+import jakarta.ejb.EJB;
+import jakarta.ejb.Stateless;
+import jakarta.ejb.TransactionAttribute;
+import jakarta.ejb.TransactionAttributeType;
+import jakarta.inject.Inject;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.PersistenceContext;
+import jakarta.persistence.TypedQuery;
 
 /**
  * Service bean for managing and executing {@link Workflow}s
@@ -394,7 +393,7 @@ private void workflowCompleted(Workflow wf, WorkflowContext ctxt) {
                 String dataFilePIDFormat = settings.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat, "DEPENDENT");
                 boolean registerGlobalIdsForFiles = 
                         (currentGlobalIdProtocol.equals(ctxt.getDataset().getProtocol()) || dataFilePIDFormat.equals("INDEPENDENT")) 
-                        && systemConfig.isFilePIDsEnabled();
+                        && systemConfig.isFilePIDsEnabledForCollection(ctxt.getDataset().getOwner());
                 if ( registerGlobalIdsForFiles ){
                     registerGlobalIdsForFiles = currentGlobalAuthority.equals( ctxt.getDataset().getAuthority() );
                 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java
index 105af6a00d8..b0567bff107 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/ArchivalSubmissionWorkflowStep.java
@@ -1,6 +1,5 @@
 package edu.harvard.iq.dataverse.workflow.internalspi;
 
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
@@ -10,13 +9,12 @@
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
 
-import java.lang.reflect.Constructor;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 /**
  * A step that submits a BagIT bag of the newly published dataset version via a
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java
index bbe200aaeb3..ee770d4057e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/AuthorizedExternalStep.java
@@ -6,23 +6,18 @@
 import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType;
 import edu.harvard.iq.dataverse.workflow.step.Failure;
 import edu.harvard.iq.dataverse.workflow.step.Pending;
-import edu.harvard.iq.dataverse.workflow.step.Success;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStep;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
 import edu.harvard.iq.dataverse.workflows.WorkflowUtil;
 
-import static edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult.OK;
-
-import java.io.StringReader;
 import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import java.util.regex.Pattern;
 
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.HttpMethodBase;
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java
index 08964c78137..93ee5e60c9b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/HttpSendReceiveClientStep.java
@@ -109,7 +109,7 @@ HttpMethodBase buildMethod(boolean rollback, WorkflowContext ctxt) throws Except
         templateParams.put( "invocationId", ctxt.getInvocationId() );
         templateParams.put( "dataset.id", Long.toString(ctxt.getDataset().getId()) );
         templateParams.put( "dataset.identifier", ctxt.getDataset().getIdentifier() );
-        templateParams.put( "dataset.globalId", ctxt.getDataset().getGlobalIdString() );
+        templateParams.put( "dataset.globalId", ctxt.getDataset().getGlobalId().asString() );
         templateParams.put( "dataset.displayName", ctxt.getDataset().getDisplayName() );
         templateParams.put( "dataset.citation", ctxt.getDataset().getCitation() );
         templateParams.put( "minorVersion", Long.toString(ctxt.getNextMinorVersionNumber()) );
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java
index 3478d9398f0..124eea801d9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/internalspi/LDNAnnounceDatasetVersionStep.java
@@ -27,12 +27,12 @@
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
 
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpPost;
@@ -216,7 +216,7 @@ HttpPost buildAnnouncement(boolean qb, WorkflowContext ctxt, JsonObject target)
         Dataset d = ctxt.getDataset();
         job.add("object",
                 Json.createObjectBuilder().add("id", d.getLocalURL())
-                        .add("ietf:cite-as", d.getGlobalId().toURL().toExternalForm())
+                        .add("ietf:cite-as", d.getGlobalId().asURL())
                         .add("sorg:name", d.getDisplayName()).add("type", "sorg:Dataset"));
         job.add("origin", Json.createObjectBuilder().add("id", SystemConfig.getDataverseSiteUrlStatic())
                 .add("inbox", SystemConfig.getDataverseSiteUrlStatic() + "/api/inbox").add("type", "Service"));
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
index a06531a2666..07bcf247533 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflow/step/WorkflowStepData.java
@@ -3,14 +3,14 @@
 import edu.harvard.iq.dataverse.workflow.Workflow;
 import java.io.Serializable;
 import java.util.Map;
-import javax.persistence.Column;
-import javax.persistence.ElementCollection;
-import javax.persistence.Entity;
-import javax.persistence.FetchType;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.ManyToOne;
+import jakarta.persistence.Column;
+import jakarta.persistence.ElementCollection;
+import jakarta.persistence.Entity;
+import jakarta.persistence.FetchType;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.ManyToOne;
 
 /**
  * A database row describing a step in a workflow. Actual steps can be instantiated
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java
index d03afcaa91a..7cfa226d7ba 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowComment.java
@@ -5,14 +5,14 @@
 import java.io.Serializable;
 import java.sql.Timestamp;
 import java.util.Date;
-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.EnumType;
-import javax.persistence.Enumerated;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.JoinColumn;
+import jakarta.persistence.Column;
+import jakarta.persistence.Entity;
+import jakarta.persistence.EnumType;
+import jakarta.persistence.Enumerated;
+import jakarta.persistence.GeneratedValue;
+import jakarta.persistence.GenerationType;
+import jakarta.persistence.Id;
+import jakarta.persistence.JoinColumn;
 
 @Entity
 public class WorkflowComment implements Serializable {
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
index e6e6bfd23c8..b104f113db2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
@@ -3,17 +3,15 @@
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.api.Util;
 
-import java.io.StringReader;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import javax.json.Json;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-
+import jakarta.json.Json;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder;
-import edu.harvard.iq.dataverse.workflow.internalspi.PauseWithMessageStep;
 import edu.harvard.iq.dataverse.workflow.step.Failure;
 import edu.harvard.iq.dataverse.workflow.step.Success;
 import edu.harvard.iq.dataverse.workflow.step.WorkflowStepResult;
@@ -43,8 +41,8 @@ public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVers
     }
 
     public static WorkflowStepResult parseResponse(String externalData) {
-        try (StringReader reader = new StringReader(externalData)) {
-            JsonObject response = Json.createReader(reader).readObject();
+        try {
+            JsonObject response = JsonUtil.getJsonObject(externalData);
             String status = null;
             //Lower case is documented, upper case is deprecated
             if(response.containsKey("status")) {
diff --git a/src/main/java/propertyFiles/BuiltInRoles.properties b/src/main/java/propertyFiles/BuiltInRoles.properties
index 1f3393d9c91..026df600a9c 100644
--- a/src/main/java/propertyFiles/BuiltInRoles.properties
+++ b/src/main/java/propertyFiles/BuiltInRoles.properties
@@ -1,7 +1,7 @@
 role.admin.name=Admin
 role.admin.description=A person who has all permissions for dataverses, datasets, and files, including approving requests for restricted data.
-role.editor.name=Contributor
-role.editor.description=For datasets, a person who can edit License + Terms, and then submit them for review.
+role.contributor.name=Contributor
+role.contributor.description=For datasets, a person who can edit License + Terms, and then submit them for review.
 role.curator.name=Curator
 role.curator.description=For datasets, a person who can edit License + Terms, edit Permissions, and publish datasets.
 role.dscontributor.name=Dataset Creator
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 45807dc7cde..10576c0c116 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -15,6 +15,8 @@ embargoed=Embargoed
 embargoedaccess=Embargoed with Access
 embargoedandrestricted=Embargoed and then Restricted
 embargoedandrestrictedaccess=Embargoed and then Restricted with Access
+incomplete=Incomplete metadata
+valid=Valid
 find=Find
 search=Search
 language=Language
@@ -44,6 +46,7 @@ description=Description
 subject=Subject
 close=Close
 preview=Preview
+query=Query
 continue=Continue
 name=Name
 institution=Institution
@@ -52,6 +55,7 @@ affiliation=Affiliation
 storage=Storage
 curationLabels=Curation Labels
 metadataLanguage=Dataset Metadata Language
+guestbookEntryOption=Guestbook Entry Option
 createDataverse=Create Dataverse
 remove=Remove
 done=Done
@@ -60,6 +64,7 @@ manager=Manager
 curator=Curator
 explore=Explore
 download=Download
+transfer=Globus Transfer
 downloadOriginal=Original Format
 downloadArchival=Archival Format (.tab)
 deaccession=Deaccession
@@ -145,6 +150,7 @@ contact.header=Contact {0}
 contact.dataverse.header=Email Dataverse Contact
 contact.dataset.header=Email Dataset Contact
 contact.to=To
+contact.cc=CC
 contact.support=Support
 contact.from=From
 contact.from.required=User email is required.
@@ -169,7 +175,7 @@ contact.context.subject.support={0} support request: {1}
 contact.context.dataverse.intro={0}You have just been sent the following message from {1} via the {2} hosted dataverse named "{3}":\n\n---\n\n
 contact.context.dataverse.ending=\n\n---\n\n{0}\n{1}\n\nGo to dataverse {2}/dataverse/{3}\n\nYou received this email because you have been listed as a contact for the dataverse. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email.
 contact.context.dataverse.noContact=There is no contact address on file for this dataverse so this message is being sent to the system address.\n\n
-contact.context.dataset.greeting.helloFirstLast=Hello {0} {1},
+contact.context.dataset.greeting.helloFirstLast=Hello {0},
 contact.context.dataset.greeting.organization=Attention Dataset Contact:
 contact.context.dataset.intro={0}\n\nYou have just been sent the following message from {1} via the {2} hosted dataset titled "{3}" ({4}):\n\n---\n\n
 contact.context.dataset.ending=\n\n---\n\n{0}\n{1}\n\nGo to dataset {2}/dataset.xhtml?persistentId={3}\n\nYou received this email because you have been listed as a contact for the dataset. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email.
@@ -202,6 +208,7 @@ notification.welcome=Welcome to {0}! Get started by adding or finding data. Have
 notification.welcomeConfirmEmail=Also, check for your welcome email to verify your address.
 notification.demoSite=Demo Site
 notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}).
+notification.requestedFileAccess=You have requested access to files in dataset: {0}.
 notification.grantFileAccess=Access granted for files in dataset: {0}.
 notification.rejectFileAccess=Access rejected for requested files in dataset: {0}.
 notification.createDataverse={0} was created in {1} . To learn more about what you can do with your dataverse, check out the {2}. 
@@ -347,7 +354,7 @@ passwdReset.debug=DEBUG
 passwdReset.resetUrl=The reset URL is
 passwdReset.noEmail.tip=No email was actually sent because a user could not be found using the provided email address {0} but we don't mention this because we don't malicious users to use the form to determine if there is an account associated with an email address.
 passwdReset.illegalLink.tip=Your password reset link is not valid. If you need to reset your password, {0}click here{1} in order to request that your password to be reset again.
-passwdReset.newPasswd.details=Please, accept the new terms of use before continuing.
+passwdReset.newPasswd.details=Please pick a strong password that matches the criteria below.
 passwdReset.newPasswd=New Password
 passwdReset.rePasswd=Retype Password
 passwdReset.resetBtn=Continue
@@ -740,7 +747,8 @@ dashboard.card.datamove.dataset.command.error.indexingProblem=Dataset could not
 notification.email.create.dataverse.subject={0}: Your dataverse has been created
 notification.email.create.dataset.subject={0}: Dataset "{1}" has been created
 notification.email.dataset.created.subject={0}: Dataset "{1}" has been created
-notification.email.request.file.access.subject={0}: Access has been requested for a restricted file
+notification.email.request.file.access.subject={0}: Access has been requested for a restricted file in dataset "{1}"
+notification.email.requested.file.access.subject={0}: You have requested access to a restricted file in dataset "{1}"
 notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file
 notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected
 notification.email.submit.dataset.subject={0}: Dataset "{1}" has been submitted for review
@@ -766,6 +774,7 @@ notification.email.greeting.html=Hello, <br>
 notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance.
 notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. Send another verification email by visiting your account page.
 notification.email.requestFileAccess=File access requested for dataset: {0} by {1} ({2}). Manage permissions at {3}.
+notification.email.requestFileAccess.guestbookResponse=<br><br>Guestbook Response:<br><br>{0}
 notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1} ).
 notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1} ). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page.
 # Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test
@@ -784,6 +793,7 @@ notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did no
 notification.email.passwordReset=Hi {0},\n\nSomeone, hopefully you, requested a password reset for {1}.\n\nPlease click the link below to reset your Dataverse account password:\n\n {2} \n\n The link above will only work for the next {3} minutes.\n\n Please contact us if you did not request this password reset or need further help.
 notification.email.passwordReset.subject=Dataverse Password Reset Requested
 notification.email.datasetWasCreated=Dataset "<a href = "{0}">{1}</a>" was just created by {2} in the {3} collection.
+notification.email.requestedFileAccess=You have requested access to file(s) in dataset "<a href = "{0}">{1}</a>". Your request has been sent to the managers of this dataset, who will grant or reject your request. If you have any questions, you may reach the dataset managers using the "Contact" link on the upper right corner of the dataset page.
 hours=hours
 hour=hour
 minutes=minutes
@@ -818,6 +828,7 @@ dataverse.curationLabels.title=A set of curation status labels that are used to
 dataverse.curationLabels.disabled=Disabled
 dataverse.category=Category
 dataverse.category.title=The type that most closely reflects this dataverse.
+dataverse.guestbookentryatrequest.title=Whether Guestbooks are displayed to users when they request file access or when they download files.
 dataverse.type.selectTab.top=Select one...
 dataverse.type.selectTab.researchers=Researcher
 dataverse.type.selectTab.researchProjects=Research Project
@@ -911,10 +922,18 @@ dataverse.update.failure=This dataverse was not able to be updated.
 dataverse.selected=Selected
 dataverse.listing.error=Fatal error trying to list the contents of the dataverse. Please report this error to the Dataverse administrator.
 dataverse.datasize=Total size of the files stored in this dataverse: {0} bytes
+dataverse.storage.quota.allocation=Total quota allocation for this collection: {0} bytes
+dataverse.storage.quota.notdefined=No quota defined for this collection
+dataverse.storage.quota.updated=Storage quota successfully set for the collection
+dataverse.storage.quota.deleted=Storage quota successfully disabled for the collection
+dataverse.storage.quota.superusersonly=Only superusers can change storage quotas.
+dataverse.storage.use=Total recorded size of the files stored in this collection (user-uploaded files plus the versions in the archival tab-delimited format when applicable): {0} bytes
 dataverse.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataverse. Please report this error to the Dataverse administrator.
 dataverse.inherited=(inherited from enclosing Dataverse)
 dataverse.default=(Default)
 dataverse.metadatalanguage.setatdatasetcreation=Chosen at Dataset Creation
+dataverse.guestbookentry.atdownload=Guestbook Entry At Download
+dataverse.guestbookentry.atrequest=Guestbook Entry At Access Request
 # rolesAndPermissionsFragment.xhtml
 
 # advanced.xhtml
@@ -994,6 +1013,9 @@ dataverse.results.btn.sort.option.relevance=Relevance
 dataverse.results.cards.foundInMetadata=Found in Metadata Fields:
 dataverse.results.cards.files.tabularData=Tabular Data
 dataverse.results.solrIsDown=Please note: Due to an internal error, browsing and searching is not available.
+dataverse.results.solrIsTemporarilyUnavailable=Search Engine service (Solr) is temporarily unavailable because of high load. Please try again later.
+dataverse.results.solrIsTemporarilyUnavailable.extraText=Note that all the datasets that are part of this collection are accessible via direct links and registered DOIs.
+dataverse.results.solrFacetsDisabled=Facets temporarily unavailable.
 dataverse.theme.title=Theme
 dataverse.theme.inheritCustomization.title=For this dataverse, use the same theme as the parent dataverse.
 dataverse.theme.inheritCustomization.label=Inherit Theme
@@ -1059,9 +1081,9 @@ dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name
 dataverse.widgets.advanced.invalid.message=Please enter a valid URL
 dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL
 dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
-facet.collection.label=Show Collections
-facet.dataset.label=Show Data Projects
-facet.datafile.label=Show Files
+facet.collection.label=Toggle Collections
+facet.dataset.label=Toggle Data Projects
+facet.datafile.label=Toggle Files
 
 # permissions-manage.xhtml
 dataverse.permissions.title=Permissions
@@ -1097,6 +1119,8 @@ dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID
 dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email
 dataverse.permissionsFiles.usersOrGroups.tabHeader.authentication=Authentication
 dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Files
+dataverse.permissionsFiles.usersOrGroups.tabHeader.accessRequestDate=Date
+dataverse.permissionsFiles.usersOrGroups.tabHeader.accessRequestDateNotAvailable=N/A
 dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Access
 dataverse.permissionsFiles.usersOrGroups.file=File
 dataverse.permissionsFiles.usersOrGroups.files=Files
@@ -1354,6 +1378,16 @@ dataset.guestbookResponse.guestbook.additionalQuestions=Additional Questions
 dataset.guestbookResponse.showPreview.errorMessage=Can't show preview.
 dataset.guestbookResponse.showPreview.errorDetail=Couldn't write guestbook response.
 
+#GuestbookResponse
+dataset.guestbookResponse=Guestbook Response
+dataset.guestbookResponse.id=Guestbook Response ID
+dataset.guestbookResponse.date=Response Date
+dataset.guestbookResponse.respondent=Respondent
+dataset.guestbookResponse.question=Q
+dataset.guestbookResponse.answer=A
+dataset.guestbookResponse.noResponse=(No Response)
+
+
 # dataset.xhtml
 dataset.configureBtn=Configure
 dataset.pageTitle=Add New Dataset
@@ -1361,8 +1395,10 @@ dataset.pageTitle=Add New Dataset
 dataset.accessBtn=Access Dataset
 dataset.accessBtn.header.download=Download Options
 dataset.accessBtn.header.explore=Explore Options
+dataset.accessBtn.header.configure=Configure Options
 dataset.accessBtn.header.compute=Compute Options
 dataset.accessBtn.download.size=ZIP ({0})
+dataset.accessBtn.transfer.size=({0})
 dataset.accessBtn.too.big=The dataset is too large to download. Please select the files you need from the files table.
 dataset.accessBtn.original.too.big=The dataset is too large to download in the original format. Please select the files you need from the files table.
 dataset.accessBtn.archival.too.big=The dataset is too large to download in the archival format. Please select the files you need from the files table.
@@ -1401,17 +1437,22 @@ metrics.title=Metrics
 metrics.title.tip=View more metrics information
 metrics.dataset.title=Dataset Metrics
 metrics.dataset.tip.default=Aggregated metrics for this dataset.
+metrics.dataset.makedatacount.title=Make Data Count (MDC) Metrics
+metrics.dataset.makedatacount.since=since
 metrics.dataset.tip.makedatacount=Metrics collected using <a href="https://makedatacount.org/counter-code-of-practice-for-research-data/" target="_blank" rel="noopener"/>Make Data Count</a> standards.
-metrics.dataset.views.tip=Dataset views are combined with both aggregated file views and file downloads.
+metrics.dataset.views.tip=Aggregate of views of the dataset landing page, file views, and file downloads.
 metrics.dataset.downloads.default.tip=Total aggregated downloads of files in this dataset.
 metrics.dataset.downloads.makedatacount.tip=Each file downloaded is counted as 1, and added to the total download count.
+metrics.dataset.downloads.premakedatacount.tip=Downloads prior to enabling MDC. Counts do not have the same filtering and detail as MDC metrics.
 metrics.dataset.citations.tip=Click for a list of citation URLs.
 metrics.file.title=File Metrics
 metrics.file.tip.default=Metrics for this individual file.
 metrics.file.tip.makedatacount=Individual file downloads are tracked in Dataverse but are not reported as part of the Make Data Count standard.
 metrics.file.downloads.tip=Total downloads of this file.
+metrics.file.downloads.nonmdc.tip=Total downloads. Due to differences between MDC and Dataverse's internal tracking, the sum of these for all files in a dataset may be larger than total downloads reported for a dataset.
 metrics.views={0, choice, 0#Views|1#View|2#Views}
 metrics.downloads={0, choice, 0#Downloads|1#Download|2#Downloads}
+metrics.downloads.nonMDC={0, choice, 0#|1# (+ 1 pre-MDC |2< (+ {0} pre-MDC }
 metrics.citations={0, choice, 0#Citations|1#Citation|2#Citations}
 metrics.citations.dialog.header=Dataset Citations
 metrics.citations.dialog.help=Citations for this dataset are retrieved from Crossref via DataCite using Make Data Count standards. For more information about dataset metrics, please refer to the <a href="{0}/{1}/user/dataset-management.html#dataset-metrics-and-make-data-count" title="Dataset Metrics and Make Data Count - Dataverse User Guide" target="_blank">User Guide</a>.
@@ -1553,6 +1594,7 @@ dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try
 dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again.
 dataset.message.publish.warning=<b>This draft version needs to be published.</b> 
 dataset.message.submit.warning=<b>This draft version needs to be submitted for review.</b>
+dataset.message.incomplete.warning=This draft version has incomplete metadata that needs to be edited before it can be published.
 dataset.message.publish.remind.draft=When ready for sharing, please <b>publish</b> it so that others can see these changes.
 dataset.message.submit.remind.draft=When ready for sharing, please <b>submit it for review</b>.
 dataset.message.publish.remind.draft.filePage=When ready for sharing, please <a href="/dataset.xhtml?persistentId={0}">go to the dataset page</a> to publish it so that others can see these changes.
@@ -1589,6 +1631,13 @@ dataset.metadata.persistentId=Persistent Identifier
 dataset.metadata.persistentId.tip=The Dataset's unique persistent identifier, either a DOI or Handle
 dataset.metadata.alternativePersistentId=Previous Dataset Persistent ID
 dataset.metadata.alternativePersistentId.tip=A previously used persistent identifier for the Dataset, either a DOI or Handle
+dataset.metadata.invalidEntry=is not a valid entry.
+dataset.metadata.invalidDate=is not a valid date. "yyyy" is a supported format.
+dataset.metadata.invalidNumber=is not a valid number.
+dataset.metadata.invalidGeospatialCoordinates=has invalid coordinates. East must be greater than West and North must be greater than South. Missing values are NOT allowed.
+dataset.metadata.invalidInteger=is not a valid integer.
+dataset.metadata.invalidURL=is not a valid URL.
+dataset.metadata.invalidEmail=is not a valid email address.
 file.metadata.preview=Preview
 file.metadata.filetags=File Tags
 file.metadata.persistentId=File Persistent ID
@@ -1614,8 +1663,10 @@ dataset.inValidSelectedFilesForDownloadWithEmbargo=Embargoed and/or Restricted F
 dataset.noValidSelectedFilesForDownload=The selected file(s) may not be downloaded because you have not been granted access.
 dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access.
 dataset.mixedSelectedFilesForDownloadWithEmbargo=The embargoed and/or restricted file(s) selected may not be downloaded because you have not been granted access.
-
+dataset.mixedSelectedFilesForTransfer=Some file(s) cannot be transferred. (They are restricted, embargoed, or not Globus accessible.)
+dataset.inValidSelectedFilesForTransfer=Ineligible Files Selected
 dataset.downloadUnrestricted=Click Continue to download the files you have access to download.
+dataset.transferUnrestricted=Click Continue to transfer the eligible files.
 
 dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button.
 dataset.requestAccessToRestrictedFilesWithEmbargo=Embargoed files cannot be accessed during the embargo period. If your selection contains restricted files, you may request access to them by clicking the Request Access button.
@@ -1659,7 +1710,8 @@ file.select.tooltip=Select Files
 file.selectAllFiles=Select all {0} files in this dataset.
 file.dynamicCounter.filesPerPage=Files Per Page
 file.selectToAddBtn=Select Files to Add
-file.selectToAdd.tipLimit=File upload limit is {0} per file. 
+file.selectToAdd.tipLimit=File upload limit is {0} per file.
+file.selectToAdd.tipQuotaRemaining=Storage quota: {0} remaining. 
 file.selectToAdd.tipMaxNumFiles=Maximum of {0} {0, choice, 0#files|1#file|2#files} per upload.
 file.selectToAdd.tipTabularLimit=<a href="{0}/{1}/user/dataset-management.html#tabular-data-files" title="Tabular Data Files - Dataverse User Guide" target="_blank" rel="noopener">Tabular file ingest</a> is limited to {2}.
 file.selectToAdd.tipPerFileTabularLimit=Ingest is limited to the following file sizes based on their format: {0}.
@@ -1690,7 +1742,9 @@ file.editFile=Edit
 
 file.actionsBlock=File Actions
 file.accessBtn=Access File
+file.accessBtn.header.access=File Access
 file.accessBtn.header.download=Download Options
+file.accessBtn.header.metadata=Download Metadata
 file.optionsBtn=File Options
 file.optionsBtn.header.edit=Edit Options
 file.optionsBtn.header.configure=Configure Options
@@ -1727,6 +1781,7 @@ file.download.subset.header=Download Data Subset
 file.preview=Preview:
 file.fileName=File Name
 file.sizeNotAvailable=Size not available
+file.ingestFailed=Ingest failed. No further information is available.
 file.type.tabularData=Tabular Data
 file.originalChecksumType=Original File {0}
 file.checksum.exists.tip=A file with this checksum already exists in the dataset.
@@ -1807,6 +1862,8 @@ file.spss-savEncoding.current=Current Selection:
 file.spss-porExtraLabels=Variable Labels
 file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels.
 file.spss-porExtraLabels.selectToAddBtn=Select File to Add
+file.ingest.saveFailed.message=Ingest succeeded, but failed to save the ingested tabular data in the database; no further information is available
+file.ingest.saveFailed.detail.message=Ingest succeeded, but failed to save the ingested tabular data in the database: 
 file.ingestFailed.header=File available in original format only
 file.ingestFailed.message=<a href="{0}/{1}/user/dataset-management.html#tabular-data-files" title="Tabular Data Files - Dataverse User Guide" target="_blank">Tabular ingest</a> was unsuccessful.
 file.downloadBtn.format.all=All File Formats + Information
@@ -1932,6 +1989,7 @@ file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about th
 file.dataFilesTab.versions.preloadmessage=(Loading versions...)
 file.previewTab.externalTools.header=Available Previews
 file.previewTab.button.label=Preview
+file.toolsTab.button.label=File Tools
 file.previewTab.previews.not.available=Public previews are not available for this file.
 file.deleteDialog.tip=Are you sure you want to delete this dataset and all of its files? You cannot undelete this dataset.
 file.deleteDialog.header=Delete Dataset
@@ -2007,6 +2065,11 @@ file.results.btn.sort.option.newest=Newest
 file.results.btn.sort.option.oldest=Oldest
 file.results.btn.sort.option.size=Size
 file.results.btn.sort.option.type=Type
+file.results.presort.tag=Group by Tag
+file.results.presort.tag.desc=Datafiles will be grouped by Tag before being sorted.
+file.results.presort.folder=Group by Folder
+file.results.presort.folder.desc=Datafiles will be grouped by Folder before being sorted.
+file.results.presort.change.success=Grouping of Files in File Table updated.
 file.compute.fileAccessDenied=This file is restricted and you may not compute on it because you have not been granted access.
 file.configure.Button=Configure
 
@@ -2082,12 +2145,23 @@ file.description.label=Description
 file.tags.label=Tags
 file.lastupdated.label=Last Updated
 file.DatasetVersion=Version
+file.accessBtn.header.query=Query Options
 
 file.previewTab.tool.open=Open
 file.previewTab.header=Preview
 file.previewTab.presentation=File Preview Tool
 file.previewTab.openBtn=Open in New Window
 file.previewTab.exploreBtn={0} on {1}
+
+file.queryTab.tool.open=Open
+file.queryTab.header=Query
+file.queryTab.presentation=File Query Tool
+file.queryTab.openBtn=Open in New Window
+file.queryTab.queryBtn={0} on {1}
+
+#if you don't have a tool selected
+file.toolTab.header=File Tools
+
 file.metadataTab.fileMetadata.header=File Metadata
 file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID
 file.metadataTab.fileMetadata.downloadUrl.label=Download URL
@@ -2145,6 +2219,8 @@ file.message.replaceSuccess=The file has been replaced.
 file.addreplace.file_size_ok=File size is in range.
 file.addreplace.error.byte_abrev=B
 file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}.
+file.addreplace.error.quota_exceeded=This file (size {0}) exceeds the remaining storage quota of {1}.
+file.addreplace.error.unzipped.quota_exceeded=Unzipped files exceed the remaining storage quota of {0}.
 file.addreplace.error.dataset_is_null=The dataset cannot be null.
 file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
 file.addreplace.error.parsing=Error in parsing provided json
@@ -2256,6 +2332,7 @@ mydataFragment.publicationStatus=Publication Status
 mydataFragment.roles=Roles
 mydataFragment.resultsByUserName=Results by Username
 mydataFragment.search=Search my data...
+mydataFragment.validity=Metadata validity
 mydata.result=Result
 mydata.results=Results
 mydata.viewnext=View Next
@@ -2316,10 +2393,6 @@ api.prov.error.freeformMissingJsonKey=The JSON object you send must have a key c
 api.prov.error.freeformNoText=No provenance free form text available for this file.
 api.prov.error.noDataFileFound=Could not find a file based on ID.
 
-bagit.sourceOrganization=Dataverse Installation (<Site Url>)
-bagit.sourceOrganizationAddress=<Full address>
-bagit.sourceOrganizationEmail=<Email address>
-
 bagit.checksum.validation.error=Invalid checksum for file "{0}". Manifest checksum={2}, calculated checksum={3}, type={1}
 bagit.checksum.validation.exception=Error while calculating checksum for file "{0}". Checksum type={1}, error={2}
 bagit.validation.bag.file.not.found=Invalid BagIt package: "{0}"
@@ -2435,6 +2508,7 @@ dataset.registered=DatasetRegistered
 dataset.registered.msg=Your dataset is now registered.
 dataset.notlinked=DatasetNotLinked
 dataset.notlinked.msg=There was a problem linking this dataset to yours:
+dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s):  
 datasetversion.archive.success=Archival copy of Version successfully submitted
 datasetversion.archive.failure=Error in submitting an archival copy 
 datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version.
@@ -2557,6 +2631,7 @@ admin.api.deleteUser.success=Authenticated User {0} deleted.
 #Files.java
 files.api.metadata.update.duplicateFile=Filename already exists at {0}
 files.api.no.draft=No draft available for this file
+files.api.only.tabular.supported=This operation is only available for tabular files.
 
 #Datasets.java
 datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset.
@@ -2582,7 +2657,13 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi
 datasets.api.creationdate=Date Created
 datasets.api.modificationdate=Last Modified Date
 datasets.api.curationstatus=Curation Status
-
+datasets.api.version.files.invalid.order.criteria=Invalid order criteria: {0}
+datasets.api.version.files.invalid.access.status=Invalid access status: {0}
+datasets.api.deaccessionDataset.invalid.version.identifier.error=Only {0} or a specific version can be deaccessioned
+datasets.api.deaccessionDataset.invalid.forward.url=Invalid deaccession forward URL: {0}
+datasets.api.globusdownloaddisabled=File transfer from Dataverse via Globus is not available for this dataset.
+datasets.api.globusdownloadnotfound=List of files to transfer not found.
+datasets.api.globusuploaddisabled=File transfer to Dataverse via Globus is not available for this dataset.
 
 #Dataverses.java
 dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found.
@@ -2601,6 +2682,10 @@ dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target data
 dataverses.api.move.dataverse.error.forceMove=Please use the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse.
 dataverses.api.create.dataset.error.mustIncludeVersion=Please provide initial version in the dataset json
 dataverses.api.create.dataset.error.superuserFiles=Only a superuser may add files via this api
+dataverses.api.create.dataset.error.mustIncludeAuthorName=Please provide author name in the dataset json
+dataverses.api.validate.json.succeeded=The Dataset JSON provided is valid for this Dataverse Collection.
+dataverses.api.validate.json.failed=The Dataset JSON provided failed validation with the following error: 
+dataverses.api.validate.json.exception=Validation failed with the following exception:
 
 #Access.java
 access.api.allowRequests.failure.noDataset=Could not find Dataset with id: {0}
@@ -2810,6 +2895,8 @@ Restricted=Restricted
 EmbargoedThenPublic=Embargoed then Public
 EmbargoedThenRestricted=Embargoed then Restricted
 
+#metadata source - Facet Label
+Harvested=Harvested
 
 #Shibboleth login
 idp.fatal.divMissing=<div> specified  as &quot;insertAtDiv&quot; could not be located in the HTML
diff --git a/src/main/java/propertyFiles/License.properties b/src/main/java/propertyFiles/License.properties
index 2347fed9db6..6ded8c41d5b 100644
--- a/src/main/java/propertyFiles/License.properties
+++ b/src/main/java/propertyFiles/License.properties
@@ -1,4 +1,4 @@
 license.cc0_1.0.description=Creative Commons CC0 1.0 Universal Public Domain Dedication. 
 license.cc_by_4.0.description=Creative Commons Attribution 4.0 International License.
 license.cc0_1.0.name=CC0 1.0
-license.cc_by_4.0.name=CC-BY 4.0
+license.cc_by_4.0.name=CC BY 4.0
diff --git a/src/main/java/propertyFiles/geospatial.properties b/src/main/java/propertyFiles/geospatial.properties
index 04db8d3d05f..86f297c29b9 100644
--- a/src/main/java/propertyFiles/geospatial.properties
+++ b/src/main/java/propertyFiles/geospatial.properties
@@ -8,10 +8,10 @@ datasetfieldtype.city.title=City
 datasetfieldtype.otherGeographicCoverage.title=Other
 datasetfieldtype.geographicUnit.title=Geographic Unit
 datasetfieldtype.geographicBoundingBox.title=Geographic Bounding Box
-datasetfieldtype.westLongitude.title=West Longitude
-datasetfieldtype.eastLongitude.title=East Longitude
-datasetfieldtype.northLongitude.title=North Latitude
-datasetfieldtype.southLongitude.title=South Latitude
+datasetfieldtype.westLongitude.title=Westernmost (Left) Longitude
+datasetfieldtype.eastLongitude.title=Easternmost (Right) Longitude
+datasetfieldtype.northLongitude.title=Northernmost (Top) Latitude
+datasetfieldtype.southLongitude.title=Southernmost (Bottom) Latitude
 datasetfieldtype.geographicCoverage.description=Information on the geographic coverage of the data. Includes the total geographic scope of the data.
 datasetfieldtype.country.description=The country or nation that the Dataset is about.
 datasetfieldtype.state.description=The state or province that the Dataset is about. Use GeoNames for correct spelling and avoid abbreviations.
@@ -89,10 +89,10 @@ controlledvocabulary.country.cook_islands=Cook Islands
 controlledvocabulary.country.costa_rica=Costa Rica
 controlledvocabulary.country.croatia=Croatia
 controlledvocabulary.country.cuba=Cuba
-controlledvocabulary.country.curacao=Curaçao
+controlledvocabulary.country.curacao=Cura\u00e7ao
 controlledvocabulary.country.cyprus=Cyprus
 controlledvocabulary.country.czech_republic=Czech Republic
-controlledvocabulary.country.cote_d'ivoire=Côte d'Ivoire
+controlledvocabulary.country.cote_d'ivoire=C\u00f4te d'Ivoire
 controlledvocabulary.country.denmark=Denmark
 controlledvocabulary.country.djibouti=Djibouti
 controlledvocabulary.country.dominica=Dominica
@@ -216,8 +216,8 @@ controlledvocabulary.country.qatar=Qatar
 controlledvocabulary.country.romania=Romania
 controlledvocabulary.country.russian_federation=Russian Federation
 controlledvocabulary.country.rwanda=Rwanda
-controlledvocabulary.country.reunion=Réunion
-controlledvocabulary.country.saint_barthelemy=Saint Barthélemy
+controlledvocabulary.country.reunion=R\u00e9union
+controlledvocabulary.country.saint_barthelemy=Saint Barth\u00e9lemy
 controlledvocabulary.country.saint_helena,_ascension_and_tristan_da_cunha=Saint Helena, Ascension and Tristan da Cunha
 controlledvocabulary.country.saint_kitts_and_nevis=Saint Kitts and Nevis
 controlledvocabulary.country.saint_lucia=Saint Lucia
@@ -282,4 +282,4 @@ controlledvocabulary.country.western_sahara=Western Sahara
 controlledvocabulary.country.yemen=Yemen
 controlledvocabulary.country.zambia=Zambia
 controlledvocabulary.country.zimbabwe=Zimbabwe
-controlledvocabulary.country.aland_islands=Åland Islands
+controlledvocabulary.country.aland_islands=\u00c5land Islands
diff --git a/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml b/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml
index 167fbdbec5d..0294f15e967 100644
--- a/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml
+++ b/src/main/resources/META-INF/batch-jobs/FileSystemImportJob.xml
@@ -34,14 +34,14 @@
             </writer>
             <skippable-exception-classes>
                 <!-- To skip all the exceptions -->
-                <include class="javax.transaction.RollbackException"/>
+                <include class="jakarta.transaction.RollbackException"/>
                 <include class="java.lang.NullPointerException"/>
                 <include class="java.lang.Exception"/>
                 <include class="java.lang.Throwable"/>
             </skippable-exception-classes>
             <no-rollback-exception-classes>
                 <!-- To skip all the exceptions -->
-                <include class="javax.transaction.RollbackException"/>
+                <include class="jakarta.transaction.RollbackException"/>
                 <include class="java.lang.NullPointerException"/>
                 <include class="java.lang.Exception"/>
                 <include class="java.lang.Throwable"/>
diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties
index 58592775a98..ec8427795ee 100644
--- a/src/main/resources/META-INF/microprofile-config.properties
+++ b/src/main/resources/META-INF/microprofile-config.properties
@@ -9,7 +9,16 @@ dataverse.build=
 %ct.dataverse.siteUrl=http://${dataverse.fqdn}:8080
 
 # FILES
-dataverse.files.directory=/tmp/dataverse
+# NOTE: The following uses STORAGE_DIR for both containers and classic installations. When STORAGE_DIR is not set,
+#       the fallback "com.sun.aas.instanceRoot" is equivalent to the hardcoded default "." in glassfish-web.xml
+#       (which is relative to the domain root folder).
+#       Also, be aware that this properties file cannot provide values for lookups in glassfish-web.xml during servlet
+#       initialization, as it will not have been read yet! The names and their values are kept in sync here and over
+#       there so that the config checker can verify the directories exist and are writable.
+dataverse.files.directory=${STORAGE_DIR:/tmp/dataverse}
+dataverse.files.uploads=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/uploads
+dataverse.files.docroot=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/docroot
+dataverse.files.globus-cache-maxage=5
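+# An editorial illustration (hedged, not part of the shipped defaults): with the ${VAR:default}
+# property-expression syntax used above, setting the environment variable STORAGE_DIR=/dv would resolve
+# dataverse.files.directory to /dv, dataverse.files.uploads to /dv/uploads and dataverse.files.docroot
+# to /dv/docroot; leaving STORAGE_DIR unset falls back to /tmp/dataverse and the instanceRoot-relative paths.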
 
 # SEARCH INDEX
 dataverse.solr.host=localhost
@@ -40,3 +49,18 @@ dataverse.oai.server.maxsets=100
 # the OAI repository name, as shown by the Identify verb,
 # can be customized via the setting below:
 #dataverse.oai.server.repositoryname=
+
+# PERSISTENT IDENTIFIER PROVIDERS
+# EZID
+dataverse.pid.ezid.api-url=https://ezid.cdlib.org
+
+# DataCite
+dataverse.pid.datacite.mds-api-url=https://mds.test.datacite.org
+dataverse.pid.datacite.rest-api-url=https://api.test.datacite.org
+
+# Handle.Net
+dataverse.pid.handlenet.index=300
+
+# AUTHENTICATION
+dataverse.auth.oidc.pkce.max-cache-size=10000
+dataverse.auth.oidc.pkce.max-cache-age=300
diff --git a/src/main/resources/META-INF/persistence.xml b/src/main/resources/META-INF/persistence.xml
index 45552f36939..e6224dcdf01 100644
--- a/src/main/resources/META-INF/persistence.xml
+++ b/src/main/resources/META-INF/persistence.xml
@@ -1,11 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<persistence version="1.0" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_1_0.xsd">
+<persistence version="3.0" xmlns="https://jakarta.ee/xml/ns/persistence"
+             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+             xsi:schemaLocation="https://jakarta.ee/xml/ns/persistence https://jakarta.ee/xml/ns/persistence/persistence_3_0.xsd">
     <persistence-unit name="VDCNet-ejbPU" transaction-type="JTA">
-        <!-- provider>oracle.toplink.essentials.ejb.cmp3.EntityManagerFactoryProvider</provider-->
         <provider>org.eclipse.persistence.jpa.PersistenceProvider</provider>
         <jta-data-source>java:app/jdbc/dataverse</jta-data-source>
         <properties>
-            <!--property name="toplink.logging.level" value="FINE"/-->
 	    <!-- disabling weaving, as an experiment: - L.A. -->
 	    <property name="eclipselink.weaving" value="false"/>
 	    <!-- comment out the line below to make the app NOT build 
diff --git a/src/main/resources/db/migration/V5.13.0.1__8092-timestamp-of-data-access-request.sql b/src/main/resources/db/migration/V5.13.0.1__8092-timestamp-of-data-access-request.sql
new file mode 100644
index 00000000000..603b69bf47f
--- /dev/null
+++ b/src/main/resources/db/migration/V5.13.0.1__8092-timestamp-of-data-access-request.sql
@@ -0,0 +1,24 @@
+-- Remove duplicate file access requests that would
+-- violate the new primary key
+CREATE TABLE tmp_fileaccessrequests (datafile_id bigint NOT NULL, authenticated_user_id INT);
+
+insert into tmp_fileaccessrequests 
+select distinct datafile_id, authenticated_user_id from fileaccessrequests;
+
+DROP TABLE fileaccessrequests;
+
+ALTER TABLE tmp_fileaccessrequests 
+    RENAME TO fileaccessrequests;
+
+ALTER TABLE fileaccessrequests
+ADD COLUMN IF NOT EXISTS creation_time TIMESTAMP WITHOUT TIME ZONE DEFAULT now();
+
+ALTER TABLE fileaccessrequests ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KEY (authenticated_user_id, datafile_id);
+ALTER TABLE fileaccessrequests ADD CONSTRAINT fk_fileaccessrequests_authenticated_user_id FOREIGN KEY (authenticated_user_id)
+        REFERENCES public.authenticateduser (id) MATCH SIMPLE
+        ON UPDATE NO ACTION
+        ON DELETE NO ACTION;
+ALTER TABLE fileaccessrequests ADD CONSTRAINT fk_fileaccessrequests_datafile_id FOREIGN KEY (datafile_id)
+        REFERENCES public.dvobject (id) MATCH SIMPLE
+        ON UPDATE NO ACTION
+        ON DELETE NO ACTION
diff --git a/src/main/resources/db/migration/V5.13.0.2__8889-filepids-in-collections.sql b/src/main/resources/db/migration/V5.13.0.2__8889-filepids-in-collections.sql
new file mode 100644
index 00000000000..5e6ce945fe2
--- /dev/null
+++ b/src/main/resources/db/migration/V5.13.0.2__8889-filepids-in-collections.sql
@@ -0,0 +1 @@
+ALTER TABLE dataverse ADD COLUMN IF NOT EXISTS filePIDsEnabled bool;
diff --git a/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql
new file mode 100644
index 00000000000..c90ee4a5329
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.1__9599-guestbook-at-request.sql
@@ -0,0 +1,63 @@
+ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS request_state VARCHAR(64);
+ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS id SERIAL;
+ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fileaccessrequests_pkey;
+ALTER TABLE fileaccessrequests ADD CONSTRAINT fileaccessrequests_pkey PRIMARY KEY (id);
+ALTER TABLE fileaccessrequests ADD COLUMN IF NOT EXISTS guestbookresponse_id INT;
+ALTER TABLE fileaccessrequests DROP CONSTRAINT IF EXISTS fk_fileaccessrequests_guestbookresponse;
+ALTER TABLE fileaccessrequests ADD CONSTRAINT fk_fileaccessrequests_guestbookresponse FOREIGN KEY (guestbookresponse_id) REFERENCES guestbookresponse(id);
+DROP INDEX IF EXISTS created_requests;
+CREATE UNIQUE INDEX created_requests ON fileaccessrequests (datafile_id, authenticated_user_id) WHERE request_state='CREATED';
+
+ALTER TABLE dataverse ADD COLUMN IF NOT EXISTS guestbookatrequest bool;
+ALTER TABLE dataset ADD COLUMN IF NOT EXISTS guestbookatrequest bool;
+
+ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS eventtype VARCHAR(255);
+ALTER TABLE guestbookresponse ADD COLUMN IF NOT EXISTS sessionid VARCHAR(255);
+
+DO $$
+    BEGIN 
+        IF EXISTS (select 1 from pg_class where relname='filedownload') THEN 
+
+            UPDATE guestbookresponse g 
+                SET eventtype = (SELECT downloadtype FROM filedownload f where f.guestbookresponse_id = g.id), 
+                    sessionid = (SELECT sessionid FROM filedownload f where f.guestbookresponse_id=g.id);
+            DROP TABLE filedownload;
+        END IF;
+    END
+   $$ ;
+   
+   
+-- This creates a function that ESTIMATES the size of the
+-- GuestbookResponse table (for the metrics display), instead
+-- of relying on straight "SELECT COUNT(*) ..."
+-- It uses statistics to estimate the number of guestbook entries
+-- and the fraction of them related to downloads,
+-- i.e. those that weren't created for 'AccessRequest' events.
+-- Significant potential savings for an active installation.
+-- See https://github.com/IQSS/dataverse/issues/8840 and 
+-- https://github.com/IQSS/dataverse/pull/8972 for more details
+
+CREATE OR REPLACE FUNCTION estimateGuestBookResponseTableSize()
+RETURNS bigint AS $$
+DECLARE
+  estimatedsize bigint;
+BEGIN
+  SELECT CASE WHEN relpages<10 THEN 0
+              ELSE ((reltuples / relpages)
+               * (pg_relation_size('public.guestbookresponse') / current_setting('block_size')::int))::bigint
+               * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0 
+                   OR (select array_position(most_common_vals::text::text[], 'AccessRequest') 
+                       FROM pg_stats WHERE tablename='guestbookresponse' AND attname='eventtype') IS NULL) THEN 1
+                   ELSE 1 - (SELECT (most_common_freqs::text::text[])[array_position(most_common_vals::text::text[], 'AccessRequest')]::bigint
+                       FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END)
+         END
+     FROM   pg_class
+     WHERE  oid = 'public.guestbookresponse'::regclass INTO estimatedsize;
+
+     if estimatedsize = 0 then
+     SELECT COUNT(id) FROM guestbookresponse WHERE eventtype!= 'AccessRequest' INTO estimatedsize;
+     END if;   
+
+  RETURN estimatedsize;
+END;
+$$ LANGUAGE plpgsql IMMUTABLE;
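+-- An editorial usage sketch (not part of the original migration): the estimate can be obtained
+-- with a plain function call, e.g.
+--   SELECT estimateGuestBookResponseTableSize();
+-- For small or not-yet-analyzed tables the statistics-based estimate comes out as 0, and the
+-- function then falls back to an exact COUNT of the non-'AccessRequest' rows.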
diff --git a/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql
new file mode 100644
index 00000000000..536798015ba
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql
@@ -0,0 +1,14 @@
+-- An aggregated timestamp which is the latest of the availability dates of any embargoed files in the first published version, if present 
+ALTER TABLE dataset ADD COLUMN IF NOT EXISTS embargoCitationDate timestamp without time zone;
+-- ... and an update query that will populate this column for all the published datasets with embargoed files in the first released version:
+UPDATE dataset SET embargocitationdate=o.embargocitationdate
+FROM (SELECT d.id, MAX(e.dateavailable) AS embargocitationdate
+FROM embargo e, dataset d, datafile f, datasetversion v, filemetadata m
+WHERE v.dataset_id = d.id
+AND v.versionstate = 'RELEASED'
+AND v.versionnumber = 1
+AND v.minorversionnumber = 0
+AND f.embargo_id = e.id
+AND m.datasetversion_id = v.id
+AND m.datafile_id = f.id GROUP BY d.id) o WHERE o.id = dataset.id;
+-- (the query follows the logic that used to be in the method Dataset.getCitationDate() that calculated this adjusted date in real time). 
diff --git a/src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql b/src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql
new file mode 100644
index 00000000000..b6157e6a782
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.3__10095-guestbook-at-request2.sql
@@ -0,0 +1,34 @@
+-- This creates a function that ESTIMATES the size of the
+-- GuestbookResponse table (for the metrics display), instead
+-- of relying on straight "SELECT COUNT(*) ..."
+-- It uses statistics to estimate the number of guestbook entries
+-- and the fraction of them related to downloads,
+-- i.e. those that weren't created for 'AccessRequest' events.
+-- Significant potential savings for an active installation.
+-- See https://github.com/IQSS/dataverse/issues/8840 and 
+-- https://github.com/IQSS/dataverse/pull/8972 for more details
+
+CREATE OR REPLACE FUNCTION estimateGuestBookResponseTableSize()
+RETURNS bigint AS $$
+DECLARE
+  estimatedsize bigint;
+BEGIN
+  SELECT CASE WHEN relpages<10 THEN 0
+              ELSE ((reltuples / relpages)
+               * (pg_relation_size('public.guestbookresponse') / current_setting('block_size')::int))::bigint
+               * (SELECT CASE WHEN ((select count(*) from pg_stats where tablename='guestbookresponse') = 0 
+                   OR (select array_position(most_common_vals::text::text[], 'AccessRequest') 
+                       FROM pg_stats WHERE tablename='guestbookresponse' AND attname='eventtype') IS NULL) THEN 1
+                   ELSE 1 - (SELECT (most_common_freqs::text::text[])[array_position(most_common_vals::text::text[], 'AccessRequest')]::float
+                       FROM pg_stats WHERE tablename='guestbookresponse' and attname='eventtype') END)
+         END
+     FROM   pg_class
+     WHERE  oid = 'public.guestbookresponse'::regclass INTO estimatedsize;
+
+     if estimatedsize = 0 then
+     SELECT COUNT(id) FROM guestbookresponse WHERE eventtype!= 'AccessRequest' INTO estimatedsize;
+     END if;   
+
+  RETURN estimatedsize;
+END;
+$$ LANGUAGE plpgsql IMMUTABLE;
diff --git a/src/main/resources/db/migration/V6.0.0.4__10093-privateurluser_id_update.sql b/src/main/resources/db/migration/V6.0.0.4__10093-privateurluser_id_update.sql
new file mode 100644
index 00000000000..260f191f557
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.4__10093-privateurluser_id_update.sql
@@ -0,0 +1 @@
+ update roleassignment set assigneeidentifier=replace(assigneeidentifier, '#','!') where assigneeidentifier like '#%';
\ No newline at end of file
diff --git a/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql b/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql
new file mode 100644
index 00000000000..d6c067056ec
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.5__8549-collection-quotas.sql
@@ -0,0 +1,91 @@
+-- The somewhat convoluted queries below populate the storage sizes for the entire
+-- DvObject tree, fast. It IS possible to do it all with one recursive PostgreSQL
+-- query that crawls the tree from the leaves (DataFiles) up and adds up the
+-- sizes for all the Datasets/Collections above. Unfortunately, that appears to take
+-- some hours on a database the size of the one at IQSS. So what we are doing
+-- instead is to first compute the total sizes of all the *directly* linked objects,
+-- with a couple of linear queries. This will correctly calculate the sizes of all the
+-- Datasets (since they can only contain DataFiles, without any other hierarchy) and
+-- those Collections that only contain Datasets, but not the sizes of Collections that
+-- have sub-collections. To take sub-collections into account we then run
+-- a recursive query - but it only needs to run on the tree of Collections,
+-- which makes it reasonably fast on any real-life instance.
+-- *Temporarily* add this "tempstoragesize" column to the DvObject table.
+-- It will be used to calculate the storage sizes of all the DvObjectContainers
+-- (Datasets and Collections), as a matter of convenience. Once calculated, the values
+-- will be moved to the permanent StorageUse table.
+ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS tempStorageSize BIGINT;
+-- First we calculate the storage size of each individual dataset (a simple sum
+-- of the storage sizes of all the files in the dataset). 
+-- For datafiles, the storage size = main file size by default
+-- (we are excluding any harvested files and datasets):
+UPDATE dvobject SET tempStorageSize=o.combinedStorageSize
+FROM (SELECT datasetobject.id, SUM(file.filesize) AS combinedStorageSize
+FROM dvobject fileobject, dataset datasetobject, datafile file
+WHERE fileobject.owner_id = datasetobject.id
+AND fileobject.id = file.id
+AND datasetobject.harvestingclient_id IS null
+GROUP BY datasetobject.id) o, dataset ds WHERE o.id = dvobject.id AND dvobject.dtype='Dataset' AND dvobject.id = ds.id AND ds.harvestingclient_id IS null;
+
+-- ... but for ingested tabular files the size of the saved original needs to be added, since
+-- those also take space:
+-- (should be safe to assume that there are no *harvested ingested* files)
+UPDATE dvobject SET tempStorageSize=tempStorageSize+o.combinedStorageSize
+FROM (SELECT datasetobject.id, COALESCE(SUM(dt.originalFileSize),0) AS combinedStorageSize
+FROM dvobject fileobject, dvobject datasetobject, datafile file, datatable dt
+WHERE fileobject.owner_id = datasetobject.id
+AND fileobject.id = file.id
+AND dt.datafile_id = file.id
+GROUP BY datasetobject.id) o, dataset ds WHERE o.id = dvobject.id AND dvobject.dtype='Dataset' AND dvobject.id = ds.id AND ds.harvestingclient_id IS null;
+
+-- there may also be some auxiliary files registered in the database, such as
+-- the content generated and deposited by external tools - diff. privacy stats
+-- being one of the example. These are also considered the "payload" files that
+-- we want to count for the purposes of calculating storage use.
+UPDATE dvobject SET tempStorageSize=tempStorageSize+o.combinedStorageSize
+FROM (SELECT datasetobject.id, COALESCE(SUM(aux.fileSize),0) AS combinedStorageSize
+FROM dvobject fileobject, dvobject datasetobject, datafile file, auxiliaryFile aux
+WHERE fileobject.owner_id = datasetobject.id
+AND fileobject.id = file.id
+AND aux.datafile_id = file.id
+GROUP BY datasetobject.id) o, dataset ds WHERE o.id = dvobject.id AND dvobject.dtype='Dataset' AND dvobject.id = ds.id AND ds.harvestingclient_id IS null;
+
+
+-- ... and then we can repeat the same for collections, by setting the storage size
+-- to the sum of the storage sizes of the datasets *directly* in each collection:
+-- (no attempt is made yet to recursively count the sizes of all the child sub-collections)
+UPDATE dvobject SET tempStorageSize=o.combinedStorageSize
+FROM (SELECT collectionobject.id, SUM(datasetobject.tempStorageSize) AS combinedStorageSize
+FROM dvobject datasetobject, dvobject collectionobject
+WHERE datasetobject.owner_id = collectionobject.id
+AND datasetobject.tempStorageSize IS NOT null 
+GROUP BY collectionobject.id) o WHERE o.id = dvobject.id AND dvobject.dtype='Dataverse';
+
+-- And now we will update the storage sizes of all the Collection ("Dataverse") objects
+-- that contain sub-collections, *recursively*, to add their sizes to the totals:
+WITH RECURSIVE treestorage (id, owner_id, tempStorageSize, dtype) AS
+(
+    -- All dataverses:
+    SELECT id, owner_id, tempStorageSize, dtype
+    FROM dvobject
+    WHERE dtype = 'Dataverse'
+
+    UNION ALL
+
+    -- Recursive Member:
+    SELECT dvobject.id, treestorage.owner_id, dvobject.tempStorageSize, treestorage.dtype
+    FROM treestorage, dvobject
+    WHERE treestorage.id = dvobject.owner_id
+    AND dvobject.dtype = 'Dataverse'
+)
+UPDATE dvobject SET tempStorageSize=tempStorageSize+(SELECT COALESCE(SUM(tempStorageSize),0)
+FROM treestorage WHERE owner_id=dvobject.id)
+WHERE dvobject.dtype = 'Dataverse'
+AND dvobject.id IN (SELECT owner_id FROM treestorage WHERE owner_id IS NOT null);
+
+-- And, finally, we can move these calculated storage sizes of datasets and
+-- collection to the dedicated new table StorageUse:
+INSERT INTO storageuse (dvobjectcontainer_id,sizeinbytes) (SELECT id, tempstoragesize FROM dvobject WHERE dtype = 'Dataverse');
+INSERT INTO storageuse (dvobjectcontainer_id,sizeinbytes) (SELECT d.id, o.tempstoragesize FROM dvobject o, dataset d WHERE o.id = d.id AND d.harvestingclient_id IS NULL);
+-- ... and drop the temporary column we added to DvObject earlier:
+ALTER TABLE dvobject DROP COLUMN tempStorageSize;
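+-- An editorial sanity-check sketch (not part of the original migration): once it has run, the
+-- recorded totals can be spot-checked with a query along these lines:
+--   SELECT o.dtype, s.dvobjectcontainer_id, s.sizeinbytes
+--   FROM storageuse s JOIN dvobject o ON o.id = s.dvobjectcontainer_id
+--   ORDER BY s.sizeinbytes DESC NULLS LAST LIMIT 10;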
diff --git a/src/main/resources/db/migration/V6.0.0.6__9506-track-thumb-failures.sql b/src/main/resources/db/migration/V6.0.0.6__9506-track-thumb-failures.sql
new file mode 100644
index 00000000000..156960d2011
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.6__9506-track-thumb-failures.sql
@@ -0,0 +1 @@
+ALTER TABLE dvobject ADD COLUMN IF NOT EXISTS previewimagefail BOOLEAN DEFAULT FALSE;
diff --git a/src/main/webapp/404static.xhtml b/src/main/webapp/404static.xhtml
new file mode 100644
index 00000000000..69ff17ebc0f
--- /dev/null
+++ b/src/main/webapp/404static.xhtml
@@ -0,0 +1,109 @@
+<?xml version='1.0' encoding='UTF-8' ?>
+<!DOCTYPE html>
+<html xmlns="http://www.w3.org/1999/xhtml" xmlns:h="http://java.sun.com/jsf/html" lang="en">
+    <head>
+        <title>#{bundle['error.404.page.title']}</title>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+        <meta http-equiv="Content-Language" content="en" />
+        <meta name="viewport" content="width=device-width, initial-scale=1" />
+        <meta http-equiv="X-UA-Compatible" content="IE=edge" />
+        <meta name="description" content="Share, archive, and get credit for your data. Find and cite data across all research fields." />
+        
+        <link type="image/png" rel="icon" href="https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/images/favicondataverse.png.xhtml" />
+
+        <link type="image/png" rel="image_src" href="https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/images/dataverseproject.png.xhtml" />
+        
+        <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/3.4.1/css/bootstrap.min.css" />
+
+        <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/3.4.1/css/bootstrap-theme.min.css" />
+
+        <link rel="stylesheet" href="https://cdn.rawgit.com/IQSS/dataverse/develop/src/main/webapp/resources/css/structure.css" />
+
+        <style type="text/css">
+            /* FontCustom CSS */
+            @font-face {
+              font-family: "fontcustom";
+              src: url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.eot");
+              src: url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.eot?#iefix") format("embedded-opentype"),
+                   url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.woff") format("woff"),
+                   url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.ttf") format("truetype"),
+                   url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.svg#fontcustom") format("svg");
+              font-weight: normal;
+              font-style: normal;
+            }
+
+            @media screen and (-webkit-min-device-pixel-ratio:0) {
+              @font-face {
+                font-family: "fontcustom";
+                src: url("https://cdn.rawgit.com/IQSS/dataverse/src/main/webapp/resources/fontcustom/fontcustom_0cdeefae934823416d24b6c2132ac702.svg#fontcustom") format("svg");
+              }
+            }
+            
+            [data-icon]:before { content: attr(data-icon); }
+
+            [data-icon]:before,
+            .icon-dataverse:before {
+              display: inline-block;
+              font-family: "fontcustom";
+              font-style: normal;
+              font-weight: normal;
+              font-variant: normal;
+              line-height: 1;
+              text-decoration: inherit;
+              text-rendering: optimizeLegibility;
+              text-transform: none;
+              -moz-osx-font-smoothing: grayscale;
+              -webkit-font-smoothing: antialiased;
+              font-smoothing: antialiased;
+            }
+
+            .icon-dataverse:before { content: "\f100"; }
+
+            /* Custom CSS */
+            #navbarFixed div.navbar-header img.navbar-brand.custom-logo {height:50px !important;}
+            #dataverseDesc span > span > span > h3 {font-weight: 300 !important;}
+
+            nav.navbar.navbar-default {background: #ececec !important;}
+        </style>
+    </head>
+    <body>
+        <div id="dataverse-header-block">
+            <!-- Navbar Panel -->
+            <nav id="navbarFixed" class="navbar navbar-default navbar-fixed-top" role="navigation">
+                <div class="container">
+                    <div class="navbar-header">
+                        <h:outputFormat class="navbar-brand custom-logo" value="#{bundle['footer.dataverseProject']}"/>
+                    </div>
+                    <div class="collapse navbar-collapse" id="topNavBar">
+                        <ul class="nav navbar-nav navbar-right">
+                            <li>
+                                &#160;
+                            </li>
+                        </ul>
+                    </div>
+                </div>
+            </nav>
+        </div>
+        <div class="container" id="content">
+            <div class="alert alert-danger" role="alert" style="margin-top:3em;">
+                <h:outputFormat value="#{bundle['error.404.message']}" escape="false" />
+            </div>
+        </div>
+        <div id="footer">
+            <div class="container">
+                <div class="row">
+                    <div class="col-sm-8 small">
+                        <p>Copyright &#169; 2023, The President &#38; Fellows of Harvard College | <a href="http://best-practices.dataverse.org/harvard-policies/harvard-privacy-policy.html" target="_blank">Privacy Policy</a>
+                        </p>
+                    </div>
+                    <div class="col-sm-4 text-right">
+                        <div class="poweredbylogo">
+                            <span>Powered by</span> 
+                            <a href="http://dataverse.org/" title="The Dataverse Project" target="_blank"><img src="https://cdn.rawgit.com/IQSS/dataverse/develop/src/main/webapp/resources/images/dataverseproject_logo.png" alt="The Dataverse Project" /></a>
+                        </div>
+                    </div>
+                </div>
+            </div>
+        </div>
+    </body>
+</html>
diff --git a/src/main/webapp/WEB-INF/beans.xml b/src/main/webapp/WEB-INF/beans.xml
index 4ca8195bea5..f6c5b8cbbc4 100644
--- a/src/main/webapp/WEB-INF/beans.xml
+++ b/src/main/webapp/WEB-INF/beans.xml
@@ -1,5 +1,7 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://java.sun.com/xml/ns/javaee"
+<beans xmlns="https://jakarta.ee/xml/ns/jakartaee"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-       xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/beans_1_0.xsd">
+       xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/beans_4_0.xsd"
+       bean-discovery-mode="all">
+    <!-- 2023-06: Note that if you change bean-discovery-mode from "all" to "allocated", SWORD APIs do not work. -->
 </beans>
diff --git a/src/main/webapp/WEB-INF/faces-config.xml b/src/main/webapp/WEB-INF/faces-config.xml
index 2015ca55f5f..6eeb5a65baf 100644
--- a/src/main/webapp/WEB-INF/faces-config.xml
+++ b/src/main/webapp/WEB-INF/faces-config.xml
@@ -1,8 +1,7 @@
-<faces-config xmlns="http://xmlns.jcp.org/xml/ns/javaee"
+<faces-config xmlns="https://jakarta.ee/xml/ns/jakartaee"
               xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-              xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/javaee
-        http://xmlns.jcp.org/xml/ns/javaee/web-facesconfig_2_2.xsd"
-              version="2.2">
+              xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/web-facesconfig_4_0.xsd"
+              version="4.0">
     <application>
         <resource-bundle>
             <base-name>edu.harvard.iq.dataverse.util.LocalBundle</base-name>
diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml
index e56d7013abf..015a309fd6b 100644
--- a/src/main/webapp/WEB-INF/glassfish-web.xml
+++ b/src/main/webapp/WEB-INF/glassfish-web.xml
@@ -10,13 +10,18 @@
   </jsp-config>
   <parameter-encoding default-charset="UTF-8"/>
   <!-- Find a list of properties here: https://docs.oracle.com/cd/E19798-01/821-1750/beayb/index.html -->
-  <property name="alternatedocroot_1" value="from=/guides/* dir=./docroot"/>
-  <property name="alternatedocroot_2" value="from=/dataexplore/* dir=./docroot"/>
-  <property name="alternatedocroot_logos" value="from=/logos/* dir=./docroot"/>
-  <property name="alternatedocroot_sitemap" value="from=/sitemap/* dir=./docroot"/>
+  <!-- NOTE: relative paths given will be relative to com.sun.aas.instanceRoot (usually domain1 folder) -->
+  <!-- NOTE: As we cannot use variables in defaults of ${MPCONFIG}, a workaround is necessary for containers;
+             see src/main/docker/Dockerfile. Once Payara upstream fixes this, we can use STORAGE_DIR here. -->
+  <property name="alternatedocroot_1" value="from=/guides/* dir=${MPCONFIG=dataverse.files.docroot:./docroot}"/>
+  <property name="alternatedocroot_2" value="from=/dataexplore/* dir=${MPCONFIG=dataverse.files.docroot:./docroot}"/>
+  <property name="alternatedocroot_3" value="from=/logos/* dir=${MPCONFIG=dataverse.files.docroot:./docroot}"/>
+  <property name="alternatedocroot_4" value="from=/sitemap/* dir=${MPCONFIG=dataverse.files.docroot:./docroot}"/>
   <!--
     This folder is not only holding compiled JSP pages but also the place where file streams are stored
     during uploads. As Dataverse does not use any JSP, there will only be uploads stored here.
   -->
+  <!-- NOTE: As we cannot use variables in defaults of ${MPCONFIG}, a workaround is necessary for containers;
+             see src/main/docker/Dockerfile. Once Payara upstream fixes this, we can use STORAGE_DIR here. -->
   <property name="tempdir" value="${MPCONFIG=dataverse.files.uploads:./uploads}"/>
 </glassfish-web-app>
diff --git a/src/main/webapp/WEB-INF/web.xml b/src/main/webapp/WEB-INF/web.xml
index 8179ca970d5..427615f2f0b 100644
--- a/src/main/webapp/WEB-INF/web.xml
+++ b/src/main/webapp/WEB-INF/web.xml
@@ -1,10 +1,13 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<web-app version="3.0" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee  http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd">
+<web-app xmlns="https://jakarta.ee/xml/ns/jakartaee"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="https://jakarta.ee/xml/ns/jakartaee https://jakarta.ee/xml/ns/jakartaee/web-app_6_0.xsd"
+         version="6.0">
     <display-name>Dataverse</display-name>
     <!-- Error page -->
     <error-page>
         <error-code>404</error-code>
-        <location>/404.xhtml</location>
+        <location>/404static.xhtml</location>
     </error-page>
     <error-page>
         <error-code>500</error-code>
@@ -27,7 +30,7 @@
         See also dev guide: https://guides.dataverse.org/en/latest/developers/debugging.html
      -->
     <context-param>
-        <param-name>javax.faces.PROJECT_STAGE</param-name>
+        <param-name>jakarta.faces.PROJECT_STAGE</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.project-stage:Production}</param-value>
     </context-param>
@@ -43,23 +46,23 @@
     <!-- /context-param -->
     <context-param>
         <param-name>
-            javax.faces.INTERPRET_EMPTY_STRING_SUBMITTED_VALUES_AS_NULL
+            jakarta.faces.INTERPRET_EMPTY_STRING_SUBMITTED_VALUES_AS_NULL
         </param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.empty-string-null:true}</param-value>
     </context-param>
     <context-param>
-        <param-name>javax.faces.FACELETS_SKIP_COMMENTS</param-name>
+        <param-name>jakarta.faces.FACELETS_SKIP_COMMENTS</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.skip-comments:true}</param-value>
     </context-param>
     <context-param>
-        <param-name>javax.faces.FACELETS_BUFFER_SIZE</param-name>
+        <param-name>jakarta.faces.FACELETS_BUFFER_SIZE</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.buffer-size:102400}</param-value>
     </context-param>
     <context-param>
-        <param-name>javax.faces.FACELETS_REFRESH_PERIOD</param-name>
+        <param-name>jakarta.faces.FACELETS_REFRESH_PERIOD</param-name>
         <!-- Uses Microprofile Config to replace at runtime. Not standardized, Payara App Server specific. -->
         <param-value>${MPCONFIG=dataverse.jsf.refresh-period:-1}</param-value>
     </context-param>
@@ -86,14 +89,9 @@
     </filter-mapping>
     <servlet>
         <servlet-name>Faces Servlet</servlet-name>
-        <servlet-class>javax.faces.webapp.FacesServlet</servlet-class>
+        <servlet-class>jakarta.faces.webapp.FacesServlet</servlet-class>
         <load-on-startup>1</load-on-startup>
     </servlet>
-    <servlet>
-        <servlet-name>Push Servlet</servlet-name>
-        <servlet-class>org.primefaces.push.PushServlet</servlet-class>
-        <async-supported>true</async-supported>
-    </servlet>
     <!-- Map these files with JSF -->
     <servlet>
         <servlet-name>OAIServlet</servlet-name>
@@ -128,10 +126,6 @@
         <servlet-name>Faces Servlet</servlet-name>
         <url-pattern>*.xhtml</url-pattern>
     </servlet-mapping>
-    <servlet-mapping>
-        <servlet-name>Push Servlet</servlet-name>
-        <url-pattern>/primepush/*</url-pattern>
-    </servlet-mapping>
     <servlet-mapping>
         <servlet-name>OAIServlet</servlet-name>
         <url-pattern>/oai</url-pattern>
@@ -274,5 +268,13 @@
         <servlet-name>edu.harvard.iq.dataverse.api.datadeposit.SWORDv2ContainerServlet</servlet-name>
         <url-pattern>/dvn/api/data-deposit/v1.1/swordv2/edit/*</url-pattern>
     </servlet-mapping>
+    <filter>
+        <filter-name>edu.harvard.iq.dataverse.api.datadeposit.SwordFilter</filter-name>
+        <filter-class>edu.harvard.iq.dataverse.api.datadeposit.SwordFilter</filter-class>
+    </filter>
+    <filter-mapping>
+        <filter-name>edu.harvard.iq.dataverse.api.datadeposit.SwordFilter</filter-name>
+        <url-pattern>/dvn/api/data-deposit/v1.1/swordv2/edit-media/*</url-pattern>
+    </filter-mapping>
     <!-- END Data Deposit API (SWORD v2) -->
 </web-app>
diff --git a/src/main/webapp/contactFormFragment.xhtml b/src/main/webapp/contactFormFragment.xhtml
index 264fde98545..cb4eb3d0872 100644
--- a/src/main/webapp/contactFormFragment.xhtml
+++ b/src/main/webapp/contactFormFragment.xhtml
@@ -16,6 +16,12 @@
                         <p id="messageTo" class="form-control-static">#{sendFeedbackDialog.getMessageTo()}</p>
                     </div>
                 </div>
+                <div class="form-group" jsf:rendered="#{sendFeedbackDialog.ccSupport()}">
+                    <label for="messageCC" class="col-sm-3 control-label">#{bundle['contact.cc']}</label>
+                    <div class="col-sm-9">
+                        <p id="messageCC" class="form-control-static">#{sendFeedbackDialog.getMessageCC()}</p>
+                    </div>
+                </div>
                 <ui:fragment rendered="#{sendFeedbackDialog.isLoggedIn()}">
                 <!-- from email (logged in + auto-filled) -->
                     <div class="form-group">
diff --git a/src/main/webapp/dataset-citation.xhtml b/src/main/webapp/dataset-citation.xhtml
index 9baced25be0..b42dd5e563f 100644
--- a/src/main/webapp/dataset-citation.xhtml
+++ b/src/main/webapp/dataset-citation.xhtml
@@ -33,19 +33,13 @@
                     </button>
                     <ul class="dropdown-menu">
                         <li>
-                            <a jsf:id="endNoteLink" jsf:action="#{DatasetPage.fileDownloadService.downloadDatasetCitationXML(DatasetPage.dataset)}" >
-                                #{bundle['dataset.cite.downloadBtn.xml']}
-                            </a>
+                            <h:commandLink id="endNoteLink" value="#{bundle['dataset.cite.downloadBtn.xml']}" action="#{DatasetPage.fileDownloadService.downloadDatasetCitationXML(DatasetPage.dataset)}"/>
                         </li>
                         <li>
-                            <a jsf:id="risLink" jsf:actionListener="#{DatasetPage.fileDownloadService.downloadDatasetCitationRIS(DatasetPage.dataset)}">
-                                #{bundle['dataset.cite.downloadBtn.ris']}
-                            </a>
+                            <h:commandLink id="risLink" value="#{bundle['dataset.cite.downloadBtn.ris']}" action="#{DatasetPage.fileDownloadService.downloadDatasetCitationRIS(DatasetPage.dataset)}"/>
                         </li>
                         <li>
-                            <a jsf:id="bibLink" jsf:actionListener="#{DatasetPage.fileDownloadService.downloadDatasetCitationBibtex(DatasetPage.dataset)}" target="_blank">
-                                #{bundle['dataset.cite.downloadBtn.bib']}
-                            </a>
+                            <h:commandLink id="bibLink" value="#{bundle['dataset.cite.downloadBtn.bib']}" action="#{DatasetPage.fileDownloadService.downloadDatasetCitationBibtex(DatasetPage.dataset)}" target="_blank"/>
                         </li>
                     </ul>
                 </div>
diff --git a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml
index 86e52092622..c54d94442ea 100644
--- a/src/main/webapp/dataset-license-terms.xhtml
+++ b/src/main/webapp/dataset-license-terms.xhtml
@@ -7,6 +7,13 @@
                 xmlns:o="http://omnifaces.org/ui"
                 xmlns:jsf="http://xmlns.jcp.org/jsf">
     <!-- TERMS -->
+    <ui:param name="toaNotEmpty" value="#{!empty termsOfUseAndAccess.termsOfAccess
+                                        or !empty termsOfUseAndAccess.dataAccessPlace 
+                                        or !empty termsOfUseAndAccess.originalArchive or !empty termsOfUseAndAccess.availabilityStatus 
+                                        or !empty termsOfUseAndAccess.contactForAccess or !empty termsOfUseAndAccess.sizeOfCollection 
+                                        or !empty termsOfUseAndAccess.studyCompletion
+                                        or termsOfUseAndAccess.fileAccessRequest}"/>
+    
     <div class="text-right margin-bottom" 
          jsf:rendered="#{dataverseSession.user.authenticated and empty editMode and !widgetWrapper.widgetView
                          and permissionsWrapper.canIssueUpdateDatasetCommand(DatasetPage.dataset)}">
@@ -52,6 +59,7 @@
                                            <f:selectItem itemLabel="#{bundle['license.custom']}" itemValue="#{null}"/>
                                        </c:if>
                                        <p:ajax update="touFragment" />
+                                       <f:passThroughAttribute name="aria-label" value="#{bundle['file.dataFilesTab.terms.list.license']}"/>
                                    </p:selectOneMenu>
                                </ui:fragment>
                                <ui:fragment rendered="#{!empty termsOfUseAndAccess.license}">
@@ -237,17 +245,13 @@
                 </p:fragment>
             </div>
         </div>
-       <div class="panel panel-default" jsf:rendered="#{!empty editMode or DatasetPage.restrictedFileCount > 0 
-                                                        or !empty termsOfUseAndAccess.termsOfAccess
-                                                        or !empty termsOfUseAndAccess.dataAccessPlace 
-                                        or !empty termsOfUseAndAccess.originalArchive or !empty termsOfUseAndAccess.availabilityStatus 
-                                        or !empty termsOfUseAndAccess.contactForAccess or !empty termsOfUseAndAccess.sizeOfCollection 
-                                        or !empty termsOfUseAndAccess.studyCompletion}">
+       <div class="panel panel-default" jsf:rendered="#{managePage==true or !empty editMode or (datasetPage == true and DatasetPage.restrictedFileCount > 0) 
+                                                        or toaNotEmpty}">
            <div data-toggle="collapse" data-target="#panelCollapseTOA" class="panel-heading text-info">
                <h:outputText value="#{publicStore ? bundle['file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess'] : bundle['file.dataFilesTab.terms.list.termsOfAccess.header']}"/>
                &#160;<span class="glyphicon glyphicon-chevron-up"/>
            </div>
-           <div id="panelCollapseTOA" class="#{!datasetPage or DatasetPage.hasValidTermsOfAccess ? 'collapse in' : 'collapse'}">
+           <div id="panelCollapseTOA" class="panel-collapse #{!empty editMode or (datasetPage == false and toaNotEmpty )  or (datasetPage == true and DatasetPage.hasValidTermsOfAccess) ? 'collapse in' : 'collapse'}">
                <div class="panel-body">
                    <ui:fragment rendered="#{empty editMode}">
                        <div class="form-group" jsf:rendered="#{!publicStore  and datasetPage== true and DatasetPage.hasRestrictedFiles}">
@@ -266,7 +270,7 @@
                                </div>
                            </ui:fragment>
                        </div>
-                       <div class="form-group" jsf:rendered="#{!empty termsOfUseAndAccess.termsOfAccess and DatasetPage.restrictedFileCount > 0}">
+                       <div class="form-group" jsf:rendered="#{!empty termsOfUseAndAccess.termsOfAccess and (DatasetPage.restrictedFileCount > 0 or managePage==true)}">
                            <label for="datasetForm:tabView:metadata_TermsAccessText" class="col-sm-3 control-label">
                                #{bundle['file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess']}
                                <span class="glyphicon glyphicon-question-sign tooltip-icon"
@@ -276,18 +280,18 @@
                                <p><h:outputText id="metadata_TermsAccessText" value="#{MarkupChecker:sanitizeBasicHTML(termsOfUseAndAccess.termsOfAccess)}" escape="false"/></p>
                            </div>
                        </div>
-                       <div class="form-group" jsf:rendered="#{!publicStore and ( datasetPage == true and DatasetPage.hasRestrictedFiles)}">
+                       <div class="form-group" jsf:rendered="#{!publicStore and ((datasetPage == true and DatasetPage.hasRestrictedFiles) or managePage==true)}">
                            <label for="datasetForm:tabView:metadata_RequestAccessSelected" class="col-sm-3 control-label">
                                #{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess']}
                                <span class="glyphicon glyphicon-question-sign tooltip-icon"
                                      data-toggle="tooltip" data-placement="auto right" data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title']}"></span>
                            </label>
                            <div id="metadata_RequestAccessSelected" class="col-sm-9">
-                               <p jsf:rendered="#{!DatasetPage.workingVersion.draft}">
+                               <p jsf:rendered="#{datasetPage == true and !DatasetPage.workingVersion.draft}">
                                    <h:outputText value="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request']}" rendered="#{DatasetPage.dataset.fileAccessRequest}"/>
                                    <h:outputText value="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest']}" rendered="#{!DatasetPage.dataset.fileAccessRequest}"/>
                                </p>
-                               <p jsf:rendered="#{DatasetPage.workingVersion.draft}">
+                               <p jsf:rendered="#{(datasetPage == true and DatasetPage.workingVersion.draft) or managePage==true}">
                                    <h:outputText value="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request']}" rendered="#{termsOfUseAndAccess.fileAccessRequest}"/>
                                    <h:outputText value="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest']}" rendered="#{!termsOfUseAndAccess.fileAccessRequest}"/>
                                </p>
diff --git a/src/main/webapp/dataset-versions.xhtml b/src/main/webapp/dataset-versions.xhtml
index b1612a314fc..fe0758d74f0 100644
--- a/src/main/webapp/dataset-versions.xhtml
+++ b/src/main/webapp/dataset-versions.xhtml
@@ -11,7 +11,7 @@
 <ui:fragment rendered="#{widgetWrapper.widgetView}">
     <p class="help-block">
         <h:outputFormat value="#{bundle['file.dataFilesTab.versions.widget.viewMoreInfo']}" escape="false">
-            <f:param value="#{DatasetPage.dataset.globalIdString}"/>
+            <f:param value="#{DatasetPage.dataset.globalId.asString()}"/>
             <f:param value="#{DatasetPage.dataset.displayName}"/>
             <f:param value="#{dataverseServiceBean.findRootDataverse().displayName}"/>
         </h:outputFormat>
@@ -59,11 +59,11 @@
             <ui:fragment rendered="#{versionTab.released 
                                      or ((versionTab.deaccessioned or versionTab.draft) and permissionServiceBean.on(DatasetPage.dataset).has('ViewUnpublishedDataset'))}">
                 <ui:fragment rendered="#{!(versionTab.released or versionTab.deaccessioned)}">
-                    <a href="/dataset.xhtml?persistentId=#{versionTab.dataset.globalIdString}&#38;version=#{versionTab.versionState}" class="ui-commandlink ui-widget">
+                    <a href="/dataset.xhtml?persistentId=#{versionTab.dataset.globalId.asString()}&#38;version=#{versionTab.versionState}" class="ui-commandlink ui-widget">
                         <h:outputText rendered="#{!(versionTab.released or versionTab.deaccessioned)}" value="#{versionTab.versionState}" /></a>
                 </ui:fragment>
                 <ui:fragment rendered="#{(versionTab.released or versionTab.deaccessioned)}">
-                    <a href="/dataset.xhtml?persistentId=#{versionTab.dataset.globalIdString}&#38;version=#{versionTab.versionNumber}.#{versionTab.minorVersionNumber}" class="ui-commandlink ui-widget">
+                    <a href="/dataset.xhtml?persistentId=#{versionTab.dataset.globalId.asString()}&#38;version=#{versionTab.versionNumber}.#{versionTab.minorVersionNumber}" class="ui-commandlink ui-widget">
                         <h:outputText rendered="#{versionTab.released or versionTab.deaccessioned}" value="#{versionTab.versionNumber}.#{versionTab.minorVersionNumber}" /></a>
                 </ui:fragment>
             </ui:fragment>
@@ -92,7 +92,7 @@
                 <ui:fragment rendered="#{!empty(versionTab.defaultVersionDifference.blockDataForNote)}">
                     <ui:repeat value="#{versionTab.defaultVersionDifference.blockDataForNote}" var="blockNote">
                         <h:outputText styleClass="highlightBold" rendered="#{blockNote[0].datasetFieldType.metadataBlock.displayName == 'Citation Metadata'}" value="#{bundle['file.dataFilesTab.versions.additionalCitationMetadata']} " />
-                        <h:outputText styleClass="highlightBold" rendered="#{!(blockNote[0].datasetFieldType.metadataBlock.displayName == 'Citation Metadata')}" value=" #{blockNote[0].datasetFieldType.metadataBlock.displayName}: " />
+                        <h:outputText styleClass="highlightBold" rendered="#{!(blockNote[0].datasetFieldType.metadataBlock.displayName == 'Citation Metadata')}" value=" #{blockNote[0].datasetFieldType.metadataBlock.localeDisplayName}: " />
                         <h:outputText value=" (" />
                         <h:outputText rendered="#{blockNote[1] > 0}" value="#{blockNote[1]} #{bundle['file.dataFilesTab.versions.added']}" />
                         <h:outputText rendered="#{(blockNote[1]) > 0 and (blockNote[2] + blockNote[3]) > 0}" value=", " />
diff --git a/src/main/webapp/dataset-widgets.xhtml b/src/main/webapp/dataset-widgets.xhtml
index 93072952a36..f635099dfdb 100644
--- a/src/main/webapp/dataset-widgets.xhtml
+++ b/src/main/webapp/dataset-widgets.xhtml
@@ -139,7 +139,7 @@
                             </div>
                         </ui:fragment>
                         <div class="button-block">
-                            <h:outputLink class="btn btn-default" value="/dataset.xhtml?persistentId=#{DatasetWidgetsPage.dataset.globalIdString}">
+                            <h:outputLink class="btn btn-default" value="/dataset.xhtml?persistentId=#{DatasetWidgetsPage.dataset.globalId.asString()}">
                                 <h:outputText value="#{bundle.done}"/></h:outputLink>
                         </div>
                     </p:tab>
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 6b91f815d9a..e50e68ec162 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -44,6 +44,7 @@
             <ui:param name="publishDataset" value="#{DatasetPage.publishDatasetPopup()}"/>
             <ui:param name="releaseDraft" value="#{DatasetPage.releaseDraftPopup()}"/>
             <ui:param name="releaseBoth" value="#{DatasetPage.publishBothPopup()}"/>
+            <ui:param name="valid" value="#{DatasetPage.valid}"/>
             <ui:define name="meta_header">
                 <ui:fragment rendered="#{!DatasetPage.anonymizedAccess}">
                     <meta name="description" content="#{DatasetPage.description}"/>
@@ -88,6 +89,11 @@
                 </ui:fragment>
             </ui:define>
             <ui:define name="body">
+                <!-- Add Signposting-->
+                <c:if test="#{not empty DatasetPage.getSignpostingLinkHeader()}">
+                    <f:event type="preRenderView" listener="#{facesContext.externalContext.response.setHeader('Link', DatasetPage.getSignpostingLinkHeader())}" />
+                </c:if>
+                <!-- End add Signposting-->
                 <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
                 <f:metadata>
                     <f:viewParam name="id" value="#{DatasetPage.id}"/>
@@ -98,6 +104,8 @@
                     <f:viewParam name="showIngestSuccess" value="#{DatasetPage.showIngestSuccess}"/>
                     <f:viewParam name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                     <f:viewParam name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                    <o:viewParam name="tagPresort" value="#{DatasetPage.tagPresort}" default="true"/>
+                    <o:viewParam name="folderPresort" value="#{DatasetPage.folderPresort}" default="true"/>
                     <f:viewParam name="q" value="#{DatasetPage.fileLabelSearchTerm}"/>
                     <f:viewParam name="fileTypeGroupFacet" value="#{DatasetPage.fileTypeFacet}"/>
                     <f:viewParam name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
@@ -127,6 +135,7 @@
                                             <h:outputText value="#{bundle['dataset.versionUI.inReview']}" styleClass="label label-success" rendered="#{DatasetPage.workingVersion.inReview}"/>
                                             <h:outputText value="#{bundle['dataset.versionUI.unpublished']}" styleClass="label label-warning" rendered="#{!DatasetPage.dataset.released}"/>
                                             <h:outputText value="#{bundle['dataset.versionUI.deaccessioned']}" styleClass="label label-danger" rendered="#{DatasetPage.workingVersion.deaccessioned}"/>
+                                            <h:outputText value="#{bundle['incomplete']}" styleClass="label label-danger" rendered="#{!valid}"/>
                                             <o:importFunctions type="edu.harvard.iq.dataverse.dataset.DatasetUtil" />
                                             <h:outputText value="#{DatasetUtil:getLocaleExternalStatus(DatasetPage.workingVersion.externalStatusLabel)}" styleClass="label label-info" rendered="#{DatasetPage.workingVersion.externalStatusLabel!=null and DatasetPage.canPublishDataset()}"/>
                                             <!-- DATASET VERSION NUMBER -->
@@ -169,11 +178,12 @@
                                                                 <!-- NORMAL DOWNLOAD BUTTON (NO TABULAR FILES) -->
                                                                 <li jsf:rendered="#{!DatasetPage.versionHasTabular}">                 
                                                                     <p:commandLink update="@form" actionListener="#{DatasetPage.startDownloadAllOriginal()}" styleClass="btn-download" 
-                                                                                   oncomplete="showPopup();">
+                                                                                   oncomplete="showPopup(false);">
                                                                     #{bundle.download}
                                                                         <h:outputFormat value="#{bundle['dataset.accessBtn.download.size']}">
                                                                             <f:param value="#{DatasetPage.sizeOfDataset}" />
                                                                         </h:outputFormat>
+                                                                        <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
                                                                     </p:commandLink>
                                                                 </li>
 
@@ -182,12 +192,13 @@
                                                                     <!-- DOWNLOAD ORIGINAL BUTTON (TABULAR FILES PRESENT) -->
                                                                     <li jsf:rendered="#{ DatasetPage.sizeOfDatasetOrig != '0 B'  and !(DatasetPage.tooLargeToDownloadOriginal) }">
                                                                         <p:commandLink update="@form" actionListener="#{DatasetPage.startDownloadAllOriginal()}" 
-                                                                                       oncomplete="showPopup();"
+                                                                                       oncomplete="showPopup(false);"
                                                                                        styleClass="btn-download">
                                                                             #{bundle.downloadOriginal}
                                                                             <h:outputFormat value="#{bundle['dataset.accessBtn.download.size']}">
                                                                                 <f:param value="#{DatasetPage.sizeOfDatasetOrig}" />
                                                                             </h:outputFormat>
+                                                                            <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
                                                                         </p:commandLink>
                                                                     </li>
                                                                     <!-- Original Format too big -->
@@ -197,7 +208,7 @@
                                                                     </ui:fragment>
                                                                     <!-- DOWNLOAD ARCHIVAL FILES (TABULAR FILES PRESENT) -->
                                                                     <li jsf:rendered="#{!DatasetPage.tooLargeToDownloadArchival}">
-                                                                        <p:commandLink update="@form" oncomplete="showPopup();" 
+                                                                        <p:commandLink update="@form" oncomplete="showPopup(false);" 
                                                                                        actionListener="#{DatasetPage.startDownloadAllArchival()}" styleClass="btn-download">
                                                                                 #{bundle.downloadArchival}
                                                                             <h:outputFormat value="#{bundle['dataset.accessBtn.download.size']}">
@@ -219,9 +230,14 @@
                                                             </ui:fragment>
                                                             <ui:fragment rendered="#{settingsWrapper.globusDownload and settingsWrapper.isGlobusEnabledStorageDriver(DatasetPage.dataset.effectiveStorageDriverId)}">
                                                                 <li>
-                                                                    <h:commandLink styleClass="btn-download" action="#{DatasetPage.startGlobusTransfer()}">
-                                                                        <h:outputText value="Globus Transfer"/>
-                                                                    </h:commandLink>
+                                                                    <p:commandLink update="@form" oncomplete="showPopup(true);" 
+                                                                                       actionListener="#{DatasetPage.startGlobusTransfer(true, false)}" styleClass="btn-download">
+                                                                                #{bundle.transfer}
+                                                                        <h:outputFormat value="#{bundle['dataset.accessBtn.transfer.size']}">
+                                                                            <f:param value="#{DatasetPage.sizeOfDataset}" />
+                                                                        </h:outputFormat>
+                                                                        <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
+                                                                        </p:commandLink>
                                                                 </li>
                                                             </ui:fragment>
                                                             
@@ -306,13 +322,13 @@
                                             <div class="btn-group btn-group-justified" jsf:rendered="#{showPublishLink or showSubmitForReviewLink}">
                                                 <div class="btn-group">
                                                     <!-- Publish BTN -->
-                                                    <h:outputLink value="#" disabled="#{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess }">
+                                                    <h:outputLink value="#" disabled="#{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess or !valid}">
                                                         <c:if test="#{!((showSubmitForReviewLink or showReturnToAuthorLink or not empty DatasetPage.allowedExternalStatuses) and showPublishLink)}">
-                                                            <f:passThroughAttribute name="class" value="btn btn-default btn-access btn-publish #{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess ? 'disabled' : ''}"/>
+                                                            <f:passThroughAttribute name="class" value="btn btn-default btn-access btn-publish #{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess or !valid ? 'disabled' : ''}"/>
                                                             <f:passThroughAttribute name="onclick" value="$(this).parent().find( 'li > a' ).trigger( 'click' );"/>
                                                         </c:if>
                                                         <c:if test="#{(showSubmitForReviewLink or showReturnToAuthorLink or not empty DatasetPage.allowedExternalStatuses) and showPublishLink}">
-                                                            <f:passThroughAttribute name="class" value="btn btn-default btn-access btn-publish dropdown-toggle #{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess ? 'disabled' : ''}"/>
+                                                            <f:passThroughAttribute name="class" value="btn btn-default btn-access btn-publish dropdown-toggle #{DatasetPage.lockedFromPublishing or !DatasetPage.hasValidTermsOfAccess  or !valid ? 'disabled' : ''}"/>
                                                             <f:passThroughAttribute name="data-toggle" value="dropdown"/>
                                                             <f:passThroughAttribute name="aria-haspopup" value="true"/>
                                                             <f:passThroughAttribute name="aria-expanded" value="false"/>
@@ -444,6 +460,17 @@
                                                                 <h:outputText value="#{bundle['dataset.editBtn.itemLabel.thumbnailsAndWidgets']}"/>
                                                             </h:outputLink>
                                                         </li>
+                                                        <ui:fragment rendered="#{DatasetPage.datasetConfigureTools.size() >= 1}">
+                                                            <li class="dropdown-header">#{bundle['dataset.accessBtn.header.configure']} <span class="glyphicon glyphicon-cog"/></li>
+                                                            <!-- Configure tool links -->
+                                                            <ui:repeat var="tool" value="#{DatasetPage.datasetConfigureTools}">
+                                                                <li>
+                                                                    <p:commandLink styleClass="btn-explore" actionListener="#{DatasetPage.setDatasetConfigureTool(tool)}" process="@this" update="datasetForm:configureToolDialog" oncomplete="PF('configureToolDialog').show();">
+                                                                        <h:outputText value="#{tool.getDisplayNameLang()}"/>
+                                                                    </p:commandLink>
+                                                                </li>
+                                                            </ui:repeat>
+                                                        </ui:fragment>
                                                         <ui:fragment rendered="#{!DatasetPage.dataset.released and DatasetPage.dataset.latestVersion.versionState=='DRAFT' and permissionsWrapper.canIssueDeleteDatasetCommand(DatasetPage.dataset)}">
                                                             <li role="separator" class="divider"></li>
                                                             <li>
@@ -513,7 +540,7 @@
                                         <!-- Metrics -->
                                         <div id="metrics-block">
                                             <div id="metrics-heading">
-                                                #{bundle['metrics.dataset.title']}
+                                                #{settingsWrapper.makeDataCountDisplayEnabled ? bundle['metrics.dataset.makedatacount.title'] : bundle['metrics.dataset.title']}
                                                 <ui:fragment rendered="#{!settingsWrapper.makeDataCountDisplayEnabled}">
                                                     <span class="glyphicon glyphicon-question-sign tooltip-icon" data-toggle="tooltip" data-placement="auto top" 
                                                           data-trigger="hover" data-original-title="#{bundle['metrics.dataset.tip.default']}"></span>
@@ -521,13 +548,15 @@
                                                 <ui:fragment rendered="#{settingsWrapper.makeDataCountDisplayEnabled}">
                                                     <a tabindex="0" role="button" class="glyphicon glyphicon-question-sign tooltip-icon" data-toggle="popover" data-placement="auto top" 
                                                        data-trigger="focus" data-html="true" data-content="#{bundle['metrics.dataset.tip.makedatacount']}"></a>
+                                                        <div id="metrics-heading-subtitle">#{bundle['metrics.dataset.makedatacount.since']} #{settingsWrapper.getMDCStartDate().toString()}</div>
                                                 </ui:fragment>
+                                                
                                             </div>
                                             <div id="metrics-body">
                                                 <!-- Classic downloads -->
                                                 <div class="metrics-count-block" jsf:rendered="#{!settingsWrapper.makeDataCountDisplayEnabled}">
                                                     <h:outputFormat value="{0} #{bundle['metrics.downloads']}">
-                                                        <f:param value="#{guestbookResponseServiceBean.getCountGuestbookResponsesByDatasetId(DatasetPage.dataset.id)}"/>
+                                                        <f:param value="#{guestbookResponseServiceBean.getDownloadCountByDatasetId(DatasetPage.dataset.id)}"/>
                                                     </h:outputFormat>
                                                     <span class="glyphicon glyphicon-question-sign tooltip-icon"
                                                           data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.dataset.downloads.default.tip']}"></span>
@@ -547,6 +576,13 @@
                                                     </h:outputFormat>
                                                     <span class="glyphicon glyphicon-question-sign tooltip-icon"
                                                           data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.dataset.downloads.makedatacount.tip']}"></span>
+                                                    <span jsf:rendered="#{settingsWrapper.getMDCStartDate()!=null}">        
+                                                        <h:outputFormat value="#{bundle['metrics.downloads.nonMDC']}">
+                                                            <f:param value="#{guestbookResponseServiceBean.getDownloadCountByDatasetId(DatasetPage.dataset.id, settingsWrapper.getMDCStartDate())}"/>
+                                                        </h:outputFormat>
+                                                        <span jsf:rendered="#{guestbookResponseServiceBean.getDownloadCountByDatasetId(DatasetPage.dataset.id, settingsWrapper.getMDCStartDate()) > 0}" class="glyphicon glyphicon-question-sign tooltip-icon"
+                                                            data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.dataset.downloads.premakedatacount.tip']}"></span>)
+                                                        </span>
                                                 </div>
                                                 <!-- Make Data Count citations (DOIs only, not Handles) -->
                                                 <div class="metrics-count-block" jsf:rendered="#{settingsWrapper.makeDataCountDisplayEnabled and settingsWrapper.doiInstallation}">
@@ -761,7 +797,7 @@
                             <ui:param name="editMode" value="#{!empty DatasetPage.editMode ? DatasetPage.editMode : ''}"/>
                             <ui:param name="metadataBlocks" value="#{!empty DatasetPage.editMode ? DatasetPage.datasetVersionUI.metadataBlocksForEdit.entrySet().toArray(): DatasetPage.datasetVersionUI.metadataBlocksForView.entrySet().toArray()}"/>
                             <!-- ui:param name="publicationDate" value="#{DatasetPage.dataset.publicationDateFormattedYYYYMMDD}"/-->
-                            <ui:param name="globalId" value="#{DatasetPage.dataset.globalIdString}"/>
+                            <ui:param name="globalId" value="#{DatasetPage.dataset.globalId.asString()}"/>
                             <ui:param name="mdLang" value="#{DatasetPage.getLocaleDisplayName(DatasetPage.getEffectiveMetadataLanguage(true))}"/>
                             <ui:param name="mdLangCode" value="#{DatasetPage.getEffectiveMetadataLanguage(true)}"/>
                         </ui:include>
@@ -902,7 +938,7 @@
                                     <ui:param name="metadataBlocks" value="#{!empty DatasetPage.editMode ? DatasetPage.datasetVersionUI.metadataBlocksForEdit.entrySet().toArray(): DatasetPage.datasetVersionUI.metadataBlocksForView.entrySet().toArray()}"/>
                                     <ui:param name="publicationDate" value="#{DatasetPage.dataset.publicationDate != null ? DatasetPage.dataset.publicationDateFormattedYYYYMMDD : ''}"/>
                                     <ui:param name="citationDate" value="#{DatasetPage.dataset.citationDate != null ? DatasetPage.dataset.citationDateFormattedYYYYMMDD : ''}"/>
-                                    <ui:param name="globalId" value="#{DatasetPage.dataset.globalIdString}"/>
+                                    <ui:param name="globalId" value="#{DatasetPage.dataset.globalId.asString()}"/>
                                     <ui:param name="altPID" value="#{DatasetPage.dataset.alternativePersistentIdentifier}"/>
                                     <ui:param name="mdLang" value="#{DatasetPage.getLocaleDisplayName(DatasetPage.effectiveMetadataLanguage)}"/>
                                     <ui:param name="mdLangCode" value="#{DatasetPage.effectiveMetadataLanguage}"/>
@@ -972,6 +1008,19 @@
                             </button>
                         </div>
                     </p:dialog>
+                    <p:dialog id="configureToolDialog" styleClass="smallPopUp" header="#{DatasetPage.datasetConfigureTool.displayNameLang}" widgetVar="configureToolDialog" modal="true">
+                        <p class="help-block">
+                            <h:outputFormat value="#{DatasetPage.datasetConfigureTool.description}" escape="false"/>
+                        </p>
+                        <div class="button-block">
+                            <p:commandLink styleClass="btn btn-default" action="#{DatasetPage.configure(DatasetPage.datasetConfigureTool)}" oncomplete="PF('configureToolDialog').hide();">
+                                <h:outputFormat value="#{bundle['continue']}"/>
+                            </p:commandLink>
+                            <button class="btn btn-link" onclick="PF('configureToolDialog').hide();" type="button">
+                                #{bundle.cancel}
+                            </button>
+                        </div>
+                    </p:dialog>
                     <p:dialog id="citationsDialog" styleClass="smallPopUp" header="#{bundle['metrics.citations.dialog.header']}" widgetVar="citationsDialog" modal="true">
                         <p class="help-block">
                             <h:outputFormat value="#{bundle['metrics.citations.dialog.help']}" escape="false">
@@ -1041,16 +1090,38 @@
                         <div class="button-block">
                             <p class="help-block">#{bundle['dataset.downloadUnrestricted']}</p>
                             <p:commandButton styleClass="btn btn-default" value="#{bundle.continue}" onclick="PF('downloadMixed').hide()" 
-                                             rendered="#{!DatasetPage.downloadPopupRequired}"
+                                             rendered="#{!DatasetPage.guestbookAndTermsPopupRequired}"
                                              action="#{DatasetPage.startMultipleFileDownload()}"/>
                             <p:commandButton styleClass="btn btn-default" value="#{bundle.continue}" onclick="PF('downloadMixed').hide();"
-                                             rendered="#{DatasetPage.downloadPopupRequired and !settingsWrapper.rsyncDownload}"
-                                             oncomplete="PF('downloadPopup').show();" />
+                                             rendered="#{DatasetPage.guestbookAndTermsPopupRequired and !settingsWrapper.rsyncDownload}"
+                                             oncomplete="PF('guestbookAndTermsPopup').show();" />
                             <button class="btn btn-link" onclick="PF('downloadMixed').hide();" type="button">
                                 #{bundle.cancel}
                             </button>
                         </div>
                     </p:dialog>
+                    <p:dialog id="globusTransferMixed" styleClass="smallPopUp" header="#{bundle['dataset.inValidSelectedFilesForTransfer']}" widgetVar="globusTransferMixed" modal="true">
+                        <p class="text-danger"><span class="glyphicon glyphicon-exclamation-sign"/> #{bundle['dataset.mixedSelectedFilesForTransfer']}</p>
+                        <table>
+                            <ui:repeat var="resFile" value="#{DatasetPage.selectedNonGlobusTransferableFiles}" >
+                                <tr>
+                                    <td>#{resFile.label}</td>
+                                </tr>
+                            </ui:repeat>
+                        </table>
+                        <div class="button-block">
+                            <p class="help-block">#{bundle['dataset.transferUnrestricted']}</p>
+                            <p:commandButton styleClass="btn btn-default" value="#{bundle.continue}" onclick="PF('globusTransferMixed').hide()" 
+                                             rendered="#{!DatasetPage.guestbookAndTermsPopupRequired}"
+                                             action="#{DatasetPage.startGlobusTransfer(false, true)}"/>
+                            <p:commandButton styleClass="btn btn-default" value="#{bundle.continue}" onclick="PF('globusTransferMixed').hide();"
+                                             rendered="#{DatasetPage.guestbookAndTermsPopupRequired and !settingsWrapper.rsyncDownload}"
+                                             oncomplete="PF('guestbookAndTermsPopup').show();" />
+                            <button class="btn btn-link" onclick="PF('globusTransferMixed').hide();" type="button">
+                                #{bundle.cancel}
+                            </button>
+                        </div>
+                    </p:dialog>
                     <p:dialog id="deleteConfirmation" styleClass="smallPopUp" header="#{bundle['file.deleteDialog.header']}" widgetVar="deleteConfirmation" modal="true">
                         <p class="text-warning"><span class="glyphicon glyphicon-warning-sign"/> #{bundle['file.deleteDialog.tip']}</p>
                         <div class="button-block">
@@ -1487,16 +1558,21 @@
                         </div>
                     </p:dialog>
                     <!-- END: Request Access Sign Up/Log In Button -->
-                    <p:dialog id="downloadPopup" styleClass="largePopUp" header="#{bundle['file.downloadDialog.header']}" widgetVar="downloadPopup" modal="true">
+                    <p:dialog id="guestbookAndTermsPopup" styleClass="largePopUp" header="#{bundle['file.downloadDialog.header']}" widgetVar="guestbookAndTermsPopup" modal="true">
                         <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
-                        <ui:include src="file-download-popup-fragment.xhtml">
+                        <ui:include src="guestbook-terms-popup-fragment.xhtml">
                             <ui:param name="popupContext" value="downloadDataset"/>
                             <ui:param name="workingVersion" value="#{DatasetPage.workingVersion}"/>
-                            <ui:param name="downloadPopupRequired" value="#{DatasetPage.downloadPopupRequired}"/>
+                            <ui:param name="guestbookAndTermsPopupRequired" value="#{DatasetPage.guestbookAndTermsPopupRequired}"/>
+                            <ui:param name="guestbookPopupRequired" value="#{DatasetPage.guestbookPopupRequired}"/>
                             <ui:param name="fileMetadataForAction" value="#{DatasetPage.fileMetadataForAction}"/>
                             <ui:param name="hasRestrictedFile" value="#{DatasetPage.downloadingRestrictedFiles()}"/>
                             <ui:param name="guestbookResponse" value="#{DatasetPage.guestbookResponse}" rendered="#{!settingsWrapper.rsyncDownload}"/>
+                            <ui:param name="fileDownloadHelper" value="#{DatasetPage.fileDownloadHelper}"/>
                             <ui:param name="fileDownloadService" value="#{DatasetPage.fileDownloadService}"/>
+                            <ui:param name="termsGuestbookPopupAction" value="#{DatasetPage.termsGuestbookPopupAction}"/>
+                            <ui:param name="guestbookPopupRequiredAtDownload" value="#{DatasetPage.guestbookPopupRequiredAtDownload}"/>
+                            <ui:param name="isGlobusTransfer" value="#{DatasetPage.globusTransferRequested}"/>
                         </ui:include>
                     </p:dialog>
                     <!-- Preview Guestbook -->
@@ -1523,19 +1599,11 @@
                         <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
                         <ui:include src="package-download-popup-fragment.xhtml">
                             <ui:param name="workingVersion" value="#{DatasetPage.workingVersion}"/>
-                            <ui:param name="downloadPopupRequired" value="#{DatasetPage.downloadPopupRequired}"/>
+                            <ui:param name="guestbookAndTermsPopupRequired" value="#{DatasetPage.guestbookAndTermsPopupRequired}"/>
                             <ui:param name="guestbookResponse" value="#{DatasetPage.guestbookResponse}" rendered="#{!settingsWrapper.rsyncDownload}"/>
                             <ui:param name="fileDownloadService" value="#{DatasetPage.fileDownloadService}"/>
                         </ui:include>
                     </p:dialog>
-                    <p:dialog id="requestAccessPopup" styleClass="largePopUp" header="#{bundle['file.requestAccess']}" widgetVar="requestAccessPopup" modal="true">
-                        <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
-                        <ui:include src="file-request-access-popup-fragment.xhtml">
-                            <ui:param name="workingVersion" value="#{DatasetPage.workingVersion}"/>
-                            <ui:param name="someActivelyEmbargoedFiles" value="#{DatasetPage.cantRequestDueToEmbargo}"/>
-                            <ui:param name="fileDownloadService" value="#{DatasetPage.fileDownloadService}"/>
-                        </ui:include>
-                    </p:dialog>
                     <p:dialog id="linkDatasetForm" styleClass="largePopUp" header="#{bundle['dataset.link.title']}" widgetVar="linkDatasetForm" modal="true" rendered="#{DatasetPage.showLinkingPopup}">
                         <p:focus for="dataverseLinkName"/>
                         <div class="form-horizontal">
@@ -1582,6 +1650,12 @@
                                 </div>
                             </div>
                         </div>
+                        <div>
+                            <p:fragment rendered="#{!empty DatasetPage.alreadyLinkedDataverses}"> 
+                            <h:outputLabel  value="#{bundle['dataset.linking.popop.already.linked.note']}"/>&#160;
+                            <h:outputText rendered="#{!empty DatasetPage.alreadyLinkedDataverses }" value="#{DatasetPage.alreadyLinkedDataverses}"/>
+                            </p:fragment>
+                        </div>
                         <div class="button-block">
                             <p:commandButton id="saveLinkButton" styleClass="btn btn-default"    
                                              update="linkNameContent @([id$=Messages])" 
@@ -1630,11 +1704,14 @@
                         <p class="text-warning">
                             <span class="glyphicon glyphicon-warning-sign"/> #{bundle['dataset.submitMessage']}
                         </p>
+                        <p jsf:rendered="#{!valid}" class="text-warning">
+                            <span class="glyphicon glyphicon-warning-sign"/><b style="color:red;"> #{bundle['dataset.message.incomplete.warning']}</b>
+                        </p>
                         <c:if test="#{showSubmitForReviewLink}">
                             <ui:include src="datasetLicenseInfoFragment.xhtml"/>
                         </c:if>
                         <div class="button-block">
-                            <p:commandButton styleClass="btn btn-default" value="#{bundle.submit}" 
+                            <p:commandButton rendered="#{DatasetPage.validOrCanReviewIncomplete}" styleClass="btn btn-default" value="#{bundle.submit}" 
                                              onclick="PF('inreview').hide();
                                                      PF('blockDatasetForm').hide();" action="#{DatasetPage.submitDataset}" immediate="true"/>
                             <button class="btn btn-link" onclick="PF('inreview').hide();PF('blockDatasetForm').hide();" type="button">
@@ -1646,7 +1723,8 @@
                     <p:dialog id="publishDataset" width="70%" header="#{bundle['dataset.publish.header']}" widgetVar="publishDataset" modal="true">
                         <ui:fragment rendered="#{publishDataset}">
                             <div class="form-group">
-                                <p class="col-sm-12 text-warning">#{bundle['dataset.publish.tip']}</p>
+                                <p jsf:rendered="#{valid}" class="col-sm-12 text-warning">#{bundle['dataset.publish.tip']}</p>
+                                <p jsf:rendered="#{!valid}" class="col-sm-12 text-warning"><b style="color:red;">#{bundle['dataset.message.incomplete.warning']}</b></p>
                                 <p class="col-sm-12 help-block">#{bundle['dataset.publish.terms.tip']}</p>
                             </div>
                         </ui:fragment>
@@ -1706,7 +1784,7 @@
                             </div>
                         </ui:fragment>
                         <div class="button-block">
-                            <p:commandButton styleClass="btn btn-default" value="#{bundle.continue}" 
+                            <p:commandButton rendered="#{valid}" styleClass="btn btn-default" value="#{bundle.continue}" 
                                              onclick="PF('publishDataset').hide();
                                                      PF('blockDatasetForm').hide();" action="#{DatasetPage.releaseDataset}" />
                             <button class="btn btn-link" onclick="PF('publishDataset').hide();
@@ -1861,10 +1939,14 @@
                         $('button[id$="updateOwnerDataverse"]').trigger('click');
                     }
                     
-                    function showPopup() {
+                    function showPopup(isTransfer) {
                         var outcome = document.getElementById("datasetForm:validateFilesOutcome").value;
                         if (outcome ==='Mixed'){
-                            PF('downloadMixed').show();
+                            if (isTransfer) {
+                                PF('globusTransferMixed').show();
+                            } else {
+                                PF('downloadMixed').show();
+                            }
                         }
                         if (outcome ==='FailEmpty'){
                             PF('selectFilesForDownload').show();
@@ -1876,7 +1958,7 @@
                             PF('downloadInvalid').show();
                         }
                         if (outcome ==='GuestbookRequired'){
-                            PF('downloadPopup').show();
+                            PF('guestbookAndTermsPopup').show();
                         }
                     }
                     
@@ -1884,19 +1966,21 @@
                         var termsofAccessHidden, fileAccessRequestHidden;
                         try{
                             termsofAccessHidden = document.getElementById("datasetForm:tabView:termsofAccessHiddenLT").value;
-                            fileAccessRequestHidden  = document.getElementById("datasetForm:tabView:fileAccessRequestHiddenLT").value;  
+                            fileAccessRequestHidden  = document.getElementById("datasetForm:tabView:fileAccessRequestHiddenLT").value;
+
+                            if ('#{DatasetPage.isHasRestrictedFiles()}' === 'true' && fileAccessRequestHidden === 'false' && termsofAccessHidden === '') {
+                                //Not compliant show error keep page open...
+                            } else {
+                                PF('blockDatasetForm').show();
+                                datasetSaveCommand();
+                            }
                         }
                         catch (error){
                             //terms not present so save...
                             PF('blockDatasetForm').show();
                             datasetSaveCommand();                        
                         }                         
-                        if ('#{DatasetPage.isHasRestrictedFiles()}' === 'true' && fileAccessRequestHidden === 'false' && termsofAccessHidden === '') {
-                           //Not compliant show error keep page open...
-                        } else {
-                            PF('blockDatasetForm').show();
-                            datasetSaveCommand();
-                        }
+
                     }
                                         
                     //]]>
diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml
index aa3fa535807..41e2807c4fd 100644
--- a/src/main/webapp/dataverse.xhtml
+++ b/src/main/webapp/dataverse.xhtml
@@ -252,6 +252,8 @@
                                 </div>
                             </div>
                             <div class="col-md-6 form-group">
+                                <div class="row">
+                                    <div class="col-xs-12 form-group">
                                 <h:outputLabel for="description" styleClass="control-label">
                                     #{bundle.description}
                                     <span class="glyphicon glyphicon-question-sign tooltip-icon"
@@ -267,6 +269,21 @@
                                     </p:inputTextarea>
                                     <p:message for="description" display="text"/>
                                 </div>
+                                </div>
+                                    <div class="col-xs-12 form-group" jsf:rendered="#{DataversePage.getGuestbookEntryOptions().size()>2}">
+                                        <h:outputLabel for="dsGuestbookEntryOption" styleClass="control-label">
+                                            #{bundle.guestbookEntryOption} 
+                                            <span class="glyphicon glyphicon-question-sign tooltip-icon"
+                                                  data-toggle="tooltip" data-placement="auto right" data-original-title="#{bundle['dataverse.guestbookentryatrequest.title']}"></span>
+                                        </h:outputLabel>
+                                        <div class="form-col-container">
+                                            <h:selectOneMenu id="dsGuestbookEntryOption" styleClass="form-control" value="#{DataversePage.dataverse.guestbookEntryAtRequest}">
+                                                <f:selectItems value="#{DataversePage.getGuestbookEntryOptions()}" var="option" itemLabel="#{option.getValue()}" itemValue="#{option.getKey()}"/>
+                                            </h:selectOneMenu>
+                                            <p:message for="dsGuestbookEntryOption" display="text"/>
+                                        </div>
+                                    </div>
+                                </div>
                             </div>
                         </div>
                         <hr class="margin-top-half margin-bottom-half"/>
@@ -441,7 +458,7 @@
                                     </div>
                                     <div id="metrics-content" class="col-xs-8 small text-center">
                                         <h:outputFormat styleClass="metrics-downloads" value="{0} #{bundle['metrics.downloads']}">
-                                            <f:param value="#{guestbookResponseServiceBean.getCountOfAllGuestbookResponses()}"/>
+                                            <f:param value="#{guestbookResponseServiceBean.getTotalDownloadCount()}"/>
                                         </h:outputFormat>
                                     </div>
                                 </div>
diff --git a/src/main/webapp/dataverse_header.xhtml b/src/main/webapp/dataverse_header.xhtml
index 8ae117dd869..30818b9d683 100644
--- a/src/main/webapp/dataverse_header.xhtml
+++ b/src/main/webapp/dataverse_header.xhtml
@@ -268,7 +268,7 @@
                             </a>
                         </ui:fragment>
                         <ui:fragment rendered="#{breadcrumb.dvObject.instanceofDataset}">
-                            <c:set var="dsUrl" value="/dataset.xhtml?persistentId=#{breadcrumb.dvObject.globalIdString}#{breadcrumb.optionalUrlExtension }"/>
+                            <c:set var="dsUrl" value="/dataset.xhtml?persistentId=#{breadcrumb.dvObject.globalId.asString()}#{breadcrumb.optionalUrlExtension }"/>
                             <a id="breadcrumbLnk#{status.index}" href="#{widgetWrapper.isWidgetTarget(breadcrumb.dvObject) ? widgetWrapper.wrapURL(dsUrl) : dsUrl}" target="#{!widgetWrapper.widgetView or widgetWrapper.isWidgetTarget(breadcrumb.dvObject) ? '' : '_blank'}" rel="#{!widgetWrapper.widgetView or widgetWrapper.isWidgetTarget(breadcrumb.dvObject) ? '' : 'noopener'}">
                                 <h:outputText value="#{breadcrumb.breadcrumbText}"/>
                             </a>
diff --git a/src/main/webapp/dataverse_template.xhtml b/src/main/webapp/dataverse_template.xhtml
index 207b223efc1..a1f22a97642 100644
--- a/src/main/webapp/dataverse_template.xhtml
+++ b/src/main/webapp/dataverse_template.xhtml
@@ -81,7 +81,7 @@
         <script src="#{resource['js/jquery.sharrre.js']}?version=#{settingsWrapper.appVersion}"></script>
         <script src="#{resource['js/clipboard.min.js']}?version=#{settingsWrapper.appVersion}"></script>
         <o:onloadScript>bind_bsui_components();</o:onloadScript>
-        <ui:fragment rendered="#{settingsWrapper.getCVocConf().size() > 0}">
+        <ui:fragment rendered="#{settingsWrapper.getCVocConf(false).size() > 0}">
             <link href="https://cdn.jsdelivr.net/npm/select2@4.1.0-rc.0/dist/css/select2.min.css" rel="stylesheet" />
             <script src="https://cdn.jsdelivr.net/npm/select2@4.1.0-rc.0/dist/js/select2.min.js"></script>
         </ui:fragment> 
diff --git a/src/main/webapp/dataverseuser.xhtml b/src/main/webapp/dataverseuser.xhtml
index e3579c6812f..2426cf980d3 100644
--- a/src/main/webapp/dataverseuser.xhtml
+++ b/src/main/webapp/dataverseuser.xhtml
@@ -236,6 +236,14 @@
                                                         </o:param>
                                                     </h:outputFormat>
                                                 </ui:fragment>
+                                                <ui:fragment rendered="#{item.type == 'REQUESTEDFILEACCESS'}">
+                                                    <span class="icon-dataset text-icon-inline text-muted"></span>
+                                                    <h:outputFormat value="#{bundle['notification.requestedFileAccess']}" escape="false">
+                                                        <o:param>
+                                                            <a href="/dataset.xhtml?persistentId=#{item.theObject.getGlobalId()}" title="#{item.theObject.displayName}">#{item.theObject.displayName}</a>
+                                                        </o:param>
+                                                    </h:outputFormat>
+                                                </ui:fragment>
                                                 <ui:fragment rendered="#{item.type == 'GRANTFILEACCESS'}">
                                                     <span class="icon-dataset text-icon-inline text-muted"></span>
                                                     <h:outputFormat value="#{bundle['notification.grantFileAccess']}" escape="false">
@@ -422,12 +430,13 @@
                                                     </h:outputFormat>
                                                 </ui:fragment>
                                                 <ui:fragment rendered="#{item.type == 'STATUSUPDATED'}">
+                                                    <o:importFunctions type="edu.harvard.iq.dataverse.dataset.DatasetUtil" />
                                                     <span class="icon-dataset text-icon-inline text-muted"></span>
                                                     <h:outputFormat value="#{bundle['notification.statusUpdated']}" escape="false">
                                                         <o:param>
                                                             <a href="/dataset.xhtml?persistentId=#{item.theObject.getDataset().getGlobalId()}&amp;version=DRAFT&amp;faces-redirect=true" title="#{item.theObject.getDataset().getDisplayName()}">#{item.theObject.getDataset().getDisplayName()}</a>
                                                         </o:param>
-                                                        <f:param value="#{item.theObject.externalStatusLabel}"/>
+                                                        <f:param value="#{DatasetUtil:getLocaleExternalStatus(item.theObject.externalStatusLabel)}"/>
                                                     </h:outputFormat>
                                                 </ui:fragment>
                                                 <ui:fragment rendered="#{item.type == 'DATASETMENTIONED'}">
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index a4e635b8c14..6fab335c0f3 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -11,9 +11,20 @@
     xmlns:o="http://omnifaces.org/ui"
     xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
 
-    <script src="#{resource['js/fileupload.js']}?version=#{systemConfig.getVersion()}"></script>
-    <script src="https://cdnjs.cloudflare.com/ajax/libs/crypto-js/3.1.2/components/core.js"></script>
-    <script src="https://cdnjs.cloudflare.com/ajax/libs/crypto-js/3.1.2/components/md5.js"></script>
+
+    <ui:param name="useDirectUpload" value="#{systemConfig.directUploadEnabled(dataset)}"/>
+    <ui:param name="checksumAlgName" value="#{systemConfig.getFileFixityChecksumAlgorithm().toString()}"/>
+    
+    <h:outputScript name='js/fileupload.js?version=#{systemConfig.getVersion()}' />
+    <ui:fragment rendered='#{useDirectUpload}'>
+        <h:outputScript name='js/crypto-js/4.0.0/core.js' />
+        <h:outputScript name='js/crypto-js/4.0.0/x64-core.js' rendered='#{(checksumAlgName eq "SHA-512")}'/>
+        <h:outputScript name='js/crypto-js/4.0.0/md5.js' rendered='#{true or checksumAlgName eq "MD5"}'/>
+        <h:outputScript name='js/crypto-js/4.0.0/sha1.js' rendered='#{checksumAlgName eq "SHA-1"}'/>
+        <h:outputScript name='js/crypto-js/4.0.0/sha256.js' rendered='#{checksumAlgName eq "SHA-256"}'/>
+        <h:outputScript name='js/crypto-js/4.0.0/sha512.js' rendered='#{checksumAlgName eq "SHA-512"}'/>
+    </ui:fragment>
+
     <!-- Static Tab Layout -->
     <div data-widget="content" class="ui-tabs ui-widget ui-widget-content ui-corner-all ui-hidden-container ui-tabs-top" id="datasetForm:tabView">
         <ul role="tablist" class="ui-tabs-nav ui-helper-reset ui-helper-clearfix ui-widget-header ui-corner-all">
@@ -80,6 +91,11 @@
                                                     rendered="#{!EditDatafilesPage.isUnlimitedUploadFileSize()}">
                                         <f:param value="#{EditDatafilesPage.getHumanMaxFileUploadSizeInBytes()}"/>
                                     </h:outputFormat>
+                                    
+                                    <h:outputFormat value=" #{bundle['file.selectToAdd.tipQuotaRemaining']}" escape="false" 
+                                                    rendered="#{EditDatafilesPage.isStorageQuotaEnforced()}">
+                                        <f:param value="#{EditDatafilesPage.getHumanMaxTotalUploadSizeInBytes()}"/>
+                                    </h:outputFormat>
                                     <h:outputFormat value=" #{bundle['file.selectToAdd.tipTabularLimit']}" escape="false"
                                                     rendered="#{EditDatafilesPage.maxIngestSizeInBytes != -1}">
                                         <f:param value="#{systemConfig.guidesBaseUrl}"/>
@@ -109,7 +125,7 @@
                         
                         $(document).ready(function () {
                             uploadWidgetDropMsg();
-                            setupDirectUpload(#{systemConfig.directUploadEnabled(EditDatafilesPage.dataset)});
+                             #{useDirectUpload ? 'setupDirectUpload(true);':''}
                         });
                         //]]>
                     </script>
@@ -143,7 +159,7 @@
                                   dragDropSupport="true"
                                   auto="#{!(systemConfig.directUploadEnabled(EditDatafilesPage.dataset))}"
                                   multiple="#{datasetPage || EditDatafilesPage.allowMultipleFileUpload()}"
-                                  disabled="#{lockedFromEdits ||  !(datasetPage || EditDatafilesPage.showFileUploadComponent()) }"
+                                  disabled="#{lockedFromEdits ||  !(datasetPage || EditDatafilesPage.showFileUploadComponent()) || EditDatafilesPage.isQuotaExceeded()}"
                                   listener="#{EditDatafilesPage.handleFileUpload}" 
                                   process="filesTable" 
                                   update=":datasetForm:filesTable, @([id$=filesButtons])"
@@ -155,6 +171,7 @@
                                   fileLimit="#{EditDatafilesPage.getMaxNumberOfFiles()}" 
                                   invalidSizeMessage="#{bundle['file.edit.error.file_exceeds_limit']}" 
                                   sequential="true"
+                                  previewWidth="-1"
                                   widgetVar="fileUploadWidget">
                         <f:passThroughAttribute name="aria-label" value="#{bundle['file.uploadFiles']}"/>
                     </p:fileUpload>
@@ -360,13 +377,13 @@
                                     <div class="pull-left col-file-thumb">
                                         <div class="thumbnail-block text-center">
                                             <!-- Thumbnail Preview -->
-                                            <span class="file-thumbnail-preview-img" jsf:rendered="#{!empty fileMetadata.dataFile.id and dataFileServiceBean.isThumbnailAvailable(fileMetadata.dataFile)}"
+                                            <span class="file-thumbnail-preview-img" jsf:rendered="#{!empty fileMetadata.dataFile.id and thumbnailServiceWrapper.isThumbnailAvailable(fileMetadata.dataFile)}"
                                                   data-container="body" data-toggle="popover" data-placement="top" data-trigger="hover" data-html="true" data-content="&lt;img src=&#34;/api/access/datafile/#{fileMetadata.dataFile.id}?imageThumb=400&#34; alt=&#34;#{bundle['file.preview']} #{fileMetadata.label}&#34; /&gt;"
                                                   data-template='&lt;div class="popover thumb-preview" role="tooltip"&gt;&lt;div class="arrow"&gt;&lt;/div&gt;&lt;h3 class="popover-title"&gt;&lt;/h3&gt;&lt;div class="popover-content"&gt;&lt;/div&gt;&lt;/div&gt;'>
                                                 <p:graphicImage value="/api/access/datafile/#{fileMetadata.dataFile.id}?imageThumb=true" alt="#{fileMetadata.label}"/>
                                             </span>
                                             <!-- Default Icon -->
-                                            <span class="icon-#{dataFileServiceBean.getFileThumbnailClass(fileMetadata.dataFile)} file-thumbnail-icon text-muted" jsf:rendered="#{(!empty fileMetadata.dataFile.id and !dataFileServiceBean.isThumbnailAvailable(fileMetadata.dataFile)) or (empty fileMetadata.dataFile.id and !fileMetadata.dataFile.previewImageAvailable)}"/>
+                                            <span class="icon-#{dataFileServiceBean.getFileThumbnailClass(fileMetadata.dataFile)} file-thumbnail-icon text-muted" jsf:rendered="#{(!empty fileMetadata.dataFile.id and !thumbnailServiceWrapper.isThumbnailAvailable(fileMetadata.dataFile)) or (empty fileMetadata.dataFile.id and !fileMetadata.dataFile.previewImageAvailable)}"/>
                                             <ui:fragment rendered="#{empty fileMetadata.dataFile.id and !empty fileMetadata.dataFile.storageIdentifier and fileMetadata.dataFile.previewImageAvailable}">
                                                 <img src="#{EditDatafilesPage.getTemporaryPreviewAsBase64(fileMetadata.dataFile.storageIdentifier)}" alt="#{fileMetadata.label}"/>
                                                 <h:outputText id="imgPreview" value="#{bundle['preview']}" styleClass="bg-info text-info text-center show"/>
@@ -584,7 +601,7 @@
         <p class="text-warning"><span class="glyphicon glyphicon-warning-sign"/> #{EditDatafilesPage.warningMessageForFileTypeDifferentPopUp}</p>
         <div class="button-block">
             <p:commandButton styleClass="btn btn-default" value="#{bundle['file.delete']}" onclick="PF('fileTypeDifferentPopup').hide()" oncomplete="uploadWidgetDropMsg();
-                            setupDirectUpload(#{systemConfig.directUploadEnabled(EditDatafilesPage.dataset)});"
+                            #{useDirectUpload ? 'setupDirectUpload(true);': ''}"
                              action="#{EditDatafilesPage.deleteFiles()}"
                              update=":#{p:resolveClientId('datasetForm:filesTable', view)},:messagePanel,:#{p:resolveClientId('datasetForm:fileUpload', view)},uploadMessage"/>
             <button class="btn btn-default" onclick="PF('fileTypeDifferentPopup').hide();" type="button">
@@ -892,7 +909,7 @@
                                           filter="false">
                         <f:selectItems value="#{EditDatafilesPage.tabFileTags}" />
                         <p:ajax event="toggleSelect" listener="#{EditDatafilesPage.handleTabularTagsSelection}" update="tabularDataTags" />
-                        <p:ajax event="change" listener="#{EditDatafilesPage.TabularTagsSelection}" update="tabularDataTags" />
+                        <p:ajax event="change" listener="#{EditDatafilesPage.handleTabularTagsSelection}" update="tabularDataTags" />
                     </p:selectCheckboxMenu>
                     <p:message for="tabularDataTags" display="text" />
                 </div>
diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml
index ac1ec525b44..9c29fd777a1 100644
--- a/src/main/webapp/file-download-button-fragment.xhtml
+++ b/src/main/webapp/file-download-button-fragment.xhtml
@@ -12,7 +12,7 @@
     xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
     
     <!-- Access Status -->
-    <li class="dropdown-header">File Access <span class="glyphicon glyphicon-file"/></li>
+    <li class="dropdown-header">#{bundle['file.accessBtn.header.access']} <span class="glyphicon glyphicon-file"/></li>
     <li><span class="dropdown-item-text">
             <span class="#{!fileMetadata.restricted ?
                            'glyphicon glyphicon-globe text-success' : (!fileDownloadHelper.canDownloadFile(fileMetadata) ? 'glyphicon glyphicon-lock text-danger' : 'icon-unlock text-success')}"/>
@@ -24,14 +24,15 @@
         </span></li>
     <ui:fragment rendered="#{!fileMetadata.datasetVersion.deaccessioned and !fileDownloadHelper.canDownloadFile(fileMetadata)
                              and fileMetadata.dataFile.owner.fileAccessRequest and !dataFileServiceBean.isActivelyEmbargoed(fileMetadata)}">
-        <p:outputPanel id="requestPanel" styleClass="iq-dropdown-list-item #{fileMetadata.dataFile.fileAccessRequesters.contains(dataverseSession.user) ? 'disabled' : ''}"
+        <p:outputPanel id="requestPanel" styleClass="iq-dropdown-list-item #{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user) ? 'disabled' : ''}"
                        rendered="#{fileDownloadHelper.session.user.authenticated}">
-            <p:commandLink styleClass="btn-request #{fileMetadata.dataFile.fileAccessRequesters.contains(dataverseSession.user) ? 'italic'  : ''}"
+            <p:commandLink styleClass="btn-request #{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user) ? 'italic'  : ''}"
                         actionListener="#{fileDownloadHelper.handleCommandLinkClick(fileMetadata)}"
-                        update="@([id$=requestAccessConsolidated]), @([id$=requestPanel]),  @([id$=filesTable]), @([id$=messagePanel])"
+                        update="@([id$=requestAccessConsolidated]), @([id$=requestPanel]),  @([id$=filesTable]), @([id$=messagePanel]), @([id$=guestbookAndTermsPopup])"
                         id="requestAccessConsolidated"
-                        disabled="#{fileMetadata.dataFile.fileAccessRequesters.contains(dataverseSession.user)}">
-                #{fileMetadata.dataFile.fileAccessRequesters.contains(dataverseSession.user) ? bundle['file.accessRequested'] : bundle['file.requestAccess']}
+                        disabled="#{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user)}">
+                 <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle['file.requestAccess'])}"/>
+                #{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user) ? bundle['file.accessRequested'] : bundle['file.requestAccess']}
             </p:commandLink>
         </p:outputPanel>
         <li jsf:rendered="#{!fileDownloadHelper.session.user.authenticated}">
@@ -59,8 +60,8 @@
         
         <!-- GlobusTransfer, orig file only  -->
          <o:importFunctions type="edu.harvard.iq.dataverse.dataaccess.DataAccess" />
-        <li jsf:rendered="#{settingsWrapper.globusFileDownload and settingsWrapper.isGlobusEnabledStorageDriver(DataAccess:getStorageDriverFromIdentifier(fileMetadata.dataFile.storageIdentifier))}">
-            <p:commandLink rendered="#{!(downloadPopupRequired) 
+        <li jsf:rendered="#{settingsWrapper.globusFileDownload and settingsWrapper.isGlobusTransferable(fileMetadata)}">
+            <p:commandLink rendered="#{!(guestbookAndTermsPopupRequired) 
                                       and !(fileMetadata.dataFile.filePackage)}"
                          styleClass="btn-download"
                          process="@this"
@@ -70,22 +71,23 @@
                 <!-- no guest book/terms of use/etc. - straight to the download API url: -->
                 <span class="globus-btn ui-icon" title="#{bundle['file.globus.transfer']}"/> #{bundle['file.globus.of']} #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
             </p:commandLink>
-            <p:commandLink rendered="#{downloadPopupRequired and !fileMetadata.dataFile.filePackage}"
+            <p:commandLink rendered="#{guestbookAndTermsPopupRequired and !fileMetadata.dataFile.filePackage}"
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                         action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'GlobusTransfer')}"
-                         update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                         actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'GlobusTransfer')}"
+                         update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                         <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                 <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                 <!-- guest book or terms of use, etc. enabled - open "download popup" first: -->
-                GT: #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
+                <span class="globus-btn ui-icon" title="#{bundle['file.globus.transfer']}"/> #{bundle['file.globus.of']} #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
             </p:commandLink>
         </li>
         
         
         <!-- NON-TABULAR + PACKAGE DOWNLOAD OPTIONS -->
         <li jsf:rendered="#{!fileMetadata.dataFile.tabularData}">
-            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and !(downloadPopupRequired) 
+            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and !(guestbookAndTermsPopupRequired) 
                                     and fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload}"
                          styleClass="btn-download" process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
@@ -96,20 +98,21 @@
                 <!-- no guest book/terms of use/etc. - straight package popup -->
                 #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
             </p:commandLink>
-            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and (downloadPopupRequired) 
+            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and (guestbookAndTermsPopupRequired) 
                                     and fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload}"
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                         action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'package')}"
-                         update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                         actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'package')}"
+                         update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                         <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                 <f:actionListener binding="#{packagePopupFragmentBean.setFileMetadata(fileMetadata)}" /> 
                 <!-- package data file: -->
                 <!-- guest book or terms of use, etc. enabled - open "download popup" first: -->
                 #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
             </p:commandLink>
-            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and !(downloadPopupRequired) 
-                                      and !(fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload)}"
+            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and !(guestbookAndTermsPopupRequired) 
+                                      and !(fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload) and settingsWrapper.isDownloadable(fileMetadata)}"
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
@@ -118,14 +121,15 @@
                 <!-- no guest book/terms of use/etc. - straight to the download API url: -->
                 #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
             </p:commandLink>
-            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and downloadPopupRequired 
-                                      and !(fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload)}"
+            <p:commandLink rendered="#{!(fileMetadata.dataFile.tabularData) and guestbookAndTermsPopupRequired 
+                                      and !(fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload) and settingsWrapper.isDownloadable(fileMetadata)}"
                          styleClass="btn-download"
                          process="@this"
                          disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                         action="#{guestbookResponseService.modifyDatafile(guestbookResponse, fileMetadata)}"
-                         update="@widgetVar(downloadPopup)" oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                         actionListener="#{guestbookResponseService.modifyDatafile(guestbookResponse, fileMetadata)}"
+                         update="@widgetVar(guestbookAndTermsPopup)" oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
                 <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
+                <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                 <!-- guest book or terms of use, etc. enabled - open "download popup" first: -->
                 #{fileMetadata.dataFile.friendlyType == 'Unknown' ? bundle['file.download.filetype.unknown'] : fileMetadata.dataFile.friendlyType}
             </p:commandLink>
@@ -134,23 +138,24 @@
         <ui:fragment rendered="#{fileMetadata.dataFile.tabularData}">
             <ui:remove>
             <li>
-                <p:commandLink styleClass="highlightBold btn-download" rendered="#{!(downloadPopupRequired)}"
+                <p:commandLink styleClass="highlightBold btn-download" rendered="#{!(guestbookAndTermsPopupRequired)}"
                                process="@this"
                                actionListener="#{fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse, fileMetadata, 'bundle')}">
                     #{bundle['file.downloadBtn.format.all']}
                 </p:commandLink>
-                <p:commandLink styleClass="highlightBold btn-download" rendered="#{downloadPopupRequired}"
+                <p:commandLink styleClass="highlightBold btn-download" rendered="#{guestbookAndTermsPopupRequired}"
                                process="@this"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'bundle' )}"
-                               update="@widgetVar(downloadPopup)"
-                               oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'bundle' )}"
+                               update="@widgetVar(guestbookAndTermsPopup)"
+                               oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                               <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                     #{bundle['file.downloadBtn.format.all']}
                 </p:commandLink>
             </li>
             <li role="presentation" class="divider"></li>
             </ui:remove>
             <li>
-                <p:commandLink styleClass="btn-download" rendered="#{!downloadPopupRequired and !(fileMetadata.dataFile.originalFormatLabel == 'UNKNOWN')}"
+                <p:commandLink styleClass="btn-download" rendered="#{!guestbookAndTermsPopupRequired and !(fileMetadata.dataFile.originalFormatLabel == 'UNKNOWN')}"
                                process="@this"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
                                actionListener="#{fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse, fileMetadata, 'original')}">
@@ -158,12 +163,13 @@
                         <f:param value="#{fileMetadata.dataFile.originalFormatLabel}"/>
                     </h:outputFormat>
                 </p:commandLink>
-                <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired and !(fileMetadata.dataFile.originalFormatLabel == 'UNKNOWN')}"
+                <p:commandLink styleClass="btn-download" rendered="#{guestbookAndTermsPopupRequired and !(fileMetadata.dataFile.originalFormatLabel == 'UNKNOWN')}"
                                process="@this"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}"
-                               update="@widgetVar(downloadPopup)"
-                               oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'original' )}"
+                               update="@widgetVar(guestbookAndTermsPopup)"
+                               oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                    <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                     <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                     <h:outputFormat value="#{bundle['file.downloadBtn.format.original']}">
                         <f:param value="#{fileMetadata.dataFile.originalFormatLabel}"/>
@@ -171,35 +177,37 @@
                 </p:commandLink>
             </li>
             <li>
-                <p:commandLink styleClass="btn-download" rendered="#{!downloadPopupRequired}"
+                <p:commandLink styleClass="btn-download" rendered="#{!guestbookAndTermsPopupRequired}"
                                process="@this"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
                                actionListener="#{fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse, fileMetadata, 'tab')}">
                     #{bundle['file.downloadBtn.format.tab']}
                 </p:commandLink>
-                <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired}"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'tab' )}"
+                <p:commandLink styleClass="btn-download" rendered="#{guestbookAndTermsPopupRequired}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'tab' )}"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                               update="@widgetVar(downloadPopup)"
-                               oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                               update="@widgetVar(guestbookAndTermsPopup)"
+                               oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                    <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                     <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                     #{bundle['file.downloadBtn.format.tab']}
                 </p:commandLink>
             </li>
             <ui:fragment rendered="#{!(fileMetadata.dataFile.originalFormatLabel == 'RData')}">
                 <li>
-                    <p:commandLink styleClass="btn-download" rendered="#{!downloadPopupRequired}"
+                    <p:commandLink styleClass="btn-download" rendered="#{!guestbookAndTermsPopupRequired}"
                                    process="@this"
                                    disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
                                    actionListener="#{fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse, fileMetadata, 'RData')}">
                         #{bundle['file.downloadBtn.format.rdata']}
                     </p:commandLink>
-                    <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired}"
+                    <p:commandLink styleClass="btn-download" rendered="#{guestbookAndTermsPopupRequired}"
                                    process="@this"
                                    disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                                   action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}"
-                                   update="@widgetVar(downloadPopup)"
-                                   oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                                   actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'RData' )}"
+                                   update="@widgetVar(guestbookAndTermsPopup)"
+                                   oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                        <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                         <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                         #{bundle['file.downloadBtn.format.rdata']}
                     </p:commandLink>
@@ -211,22 +219,23 @@
     <!-- END: CAN DOWNLOAD ACCESS Options -->
     
     <!-- Download Metadata -->
-    <li class="dropdown-header">Download Metadata <span class="glyphicon glyphicon-download-alt"/></li>
+    <li class="dropdown-header">#{bundle['file.accessBtn.header.metadata']} <span class="glyphicon glyphicon-download-alt"/></li>
     
     <ui:fragment rendered="#{fileMetadata.dataFile.tabularData and fileDownloadHelper.canDownloadFile(fileMetadata)}">
         <li>
-            <p:commandLink styleClass="btn-download" rendered="#{!downloadPopupRequired}"
+            <p:commandLink styleClass="btn-download" rendered="#{!guestbookAndTermsPopupRequired}"
                            process="@this"
                            disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
                            actionListener="#{fileDownloadService.writeGuestbookAndStartFileDownload(guestbookResponse, fileMetadata, 'var')}">
                 #{bundle['file.downloadBtn.format.var']}
             </p:commandLink>
-            <p:commandLink styleClass="btn-download" rendered="#{downloadPopupRequired}"
+            <p:commandLink styleClass="btn-download" rendered="#{guestbookAndTermsPopupRequired}"
                            process="@this"
                            disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}" 
-                           action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'var' )}"
-                           update="@widgetVar(downloadPopup)"
-                           oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                           actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'var' )}"
+                           update="@widgetVar(guestbookAndTermsPopup)"
+                           oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                           <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
                 #{bundle['file.downloadBtn.format.var']}
             </p:commandLink>
         </li>
@@ -303,18 +312,48 @@
         <!-- externalTools -->
         <ui:repeat var="tool" value="#{exploreTools}">
             <li>
-                <p:commandLink rendered="#{!downloadPopupRequired}"
+                <p:commandLink rendered="#{!guestbookAndTermsPopupRequired}"
                                styleClass="btn-explore #{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
                                action="#{fileDownloadService.explore(guestbookResponse, fileMetadata, tool )}">
                     #{tool.getDisplayNameLang()}
                 </p:commandLink>
                 <!--The modifyDatafileAndFormat method below was added because on the dataset page, "tool" is null in the popup so we store it in the guestbookResponse because we know we'll need it later in the popup.-->
-                <p:commandLink rendered="#{downloadPopupRequired}"
-                               action="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
+                <p:commandLink rendered="#{guestbookAndTermsPopupRequired}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
                                styleClass="btn-explore #{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
                                disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload)}"
                                process="@this"
+                               update="@widgetVar(guestbookAndTermsPopup)"
+                               oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                               <f:actionListener binding="#{bean.setTermsGuestbookPopupAction(bundle.download)}"/>
+                    #{tool.getDisplayNameLang()}
+                </p:commandLink>
+            </li>
+        </ui:repeat>
+        <!-- END: externalTools -->
+    </ui:fragment>
+    
+    
+    <!-- Query Options -->
+    <ui:fragment rendered="#{fileDownloadHelper.canDownloadFile(fileMetadata) and not empty queryTools}">
+        <!--query Tools is set as a ui:param by the file and dataset pages-->
+        <li class="dropdown-header">#{bundle['file.accessBtn.header.query']} <span class="glyphicon glyphicon-equalizer"/></li>
+        <!-- query Tools -->
+        <ui:repeat var="tool" value="#{queryTools}">
+            <li>
+                <p:commandLink rendered="#{!downloadPopupRequired}"
+                               styleClass="btn-query #{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
+                               disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
+                               action="#{fileDownloadService.explore(guestbookResponse, fileMetadata, tool )}">
+                    #{tool.getDisplayNameLang()}
+                </p:commandLink>
+                <!--The modifyDatafileAndFormat method below was added because on the dataset page, "tool" is null in the popup so we store it in the guestbookResponse because we know we'll need it later in the popup.-->
+                <p:commandLink rendered="#{downloadPopupRequired}"
+                               actionListener="#{guestbookResponseService.modifyDatafileAndFormat(guestbookResponse, fileMetadata, 'externalTool', tool)}"
+                               styleClass="btn-query #{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
+                               disabled="#{(fileMetadata.dataFile.ingestInProgress or lockedFromDownload)}"
+                               process="@this"
                                update="@widgetVar(downloadPopup)"
                                oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
                     #{tool.getDisplayNameLang()}
@@ -323,8 +362,6 @@
         </ui:repeat>
         <!-- END: externalTools -->
     </ui:fragment>
-    <!-- Explore Options -->
-
     <!-- Compute Options -->
     <ui:fragment rendered="#{(isFilePg ? FilePage.showComputeButton() : DatasetPage.showComputeButton(fileMetadata)) and fileDownloadHelper.canDownloadFile(fileMetadata)}">
         <li class="dropdown-header">#{bundle['dataset.accessBtn.header.compute']} <span class="glyphicon glyphicon-flash"/></li>
@@ -336,4 +373,4 @@
         </li>
     </ui:fragment>
     <!-- END: Compute Options -->
-</ui:composition>
\ No newline at end of file
+</ui:composition>
diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml
index 8531f413b38..4dac1613266 100644
--- a/src/main/webapp/file-edit-button-fragment.xhtml
+++ b/src/main/webapp/file-edit-button-fragment.xhtml
@@ -25,7 +25,6 @@
     configureTools - for single file, list of configureTools for the file
     bean - the named value of the backing bean for the below method(s), also used by isFilePg param
     unrestrictFileAction - name of the method on the above bean to call for unrestrict (method must take a boolean)    
-    editFileAction - for selected files, name of method on the above bean to send for edit metadata
     refreshTagsPopoupAction - for selected files, name of method on the above bean to refresh tags popup
     -->            
     </ui:remove>
@@ -38,7 +37,7 @@
         <p:commandLink onclick="if (!(#{fileMetadata!=null} || testFilesSelected()))
                     return false;"                        
                        oncomplete="if(#{fileMetadata!=null}) window.location.assign('/editdatafiles.xhtml?selectedFileIds=#{fileMetadata.dataFile.id}&#38;datasetId=#{fileMetadata.datasetVersion.dataset.id}#{isFilePg?'&#38;referrer=FILE':''}&#38;version=#{fileMetadata.datasetVersion.version}')"
-                       action="#{bean[editFileAction]()}">
+                       action="#{bean.editFileMetadata()}">
             <h:outputText value="#{bundle['file.metadata']}"/>
         </p:commandLink>
     </li>
@@ -96,8 +95,7 @@
             <p:commandLink      update="@([id$=fileEmbargoPopup])" 
                                 onclick="if (!(#{fileMetadata!=null} || testFilesSelected()))
                                             return false;" 
-                                oncomplete="PF('fileEmbargoPopup').show();"
-                                action="#{bean[refreshEmbargoPopoupAction]()}">
+                                oncomplete="PF('fileEmbargoPopup').show();">
                                 <f:setPropertyActionListener target="#{fileMetadataForAction}" value="#{fileMetadata}" />
                 <h:outputText value="#{bundle['file.embargo']}"/>
             </p:commandLink> 
@@ -136,4 +134,4 @@
         </ui:include>		
     </ui:fragment>
   
-</ui:composition>
\ No newline at end of file
+</ui:composition>
diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml
index 8f8de725bdc..ffc4a1fcef7 100644
--- a/src/main/webapp/file-edit-popup-fragment.xhtml
+++ b/src/main/webapp/file-edit-popup-fragment.xhtml
@@ -76,6 +76,7 @@
                     <p:inputTextarea id="termsAccessInput" 
                                      value="#{bean.termsOfAccess}" autoResize="false" rows="5" styleClass="form-control" widgetVar="inputtoa">
                      <p:ajax event="keyup" update="restrictFileButton"/>
+                      <f:passThroughAttribute name="aria-label" value="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess']}"/>
                     </p:inputTextarea> 
                 </div>
             </div>
diff --git a/src/main/webapp/file-info-fragment.xhtml b/src/main/webapp/file-info-fragment.xhtml
index 8d30f0e2179..72fe279fbf8 100644
--- a/src/main/webapp/file-info-fragment.xhtml
+++ b/src/main/webapp/file-info-fragment.xhtml
@@ -28,8 +28,8 @@
     <p:outputPanel id="fileInfoInclude-filesTable" styleClass="media" rendered="#{fileMetadata != null}">
         <div class="media-left col-file-thumb" style="padding-top:4px;">
             <div class="media-object thumbnail-block text-center">
-                <span class="icon-#{dataFileServiceBean.getFileThumbnailClass(fileMetadata.dataFile)} file-thumbnail-icon text-muted" jsf:rendered="#{!fileDownloadHelper.canDownloadFile(fileMetadata) or !dataFileServiceBean.isThumbnailAvailable(fileMetadata.dataFile)}"/>
-                <span class="file-thumbnail-preview-img" jsf:rendered="#{fileDownloadHelper.canDownloadFile(fileMetadata) and dataFileServiceBean.isThumbnailAvailable(fileMetadata.dataFile)}"
+                <span class="icon-#{dataFileServiceBean.getFileThumbnailClass(fileMetadata.dataFile)} file-thumbnail-icon text-muted" jsf:rendered="#{!fileDownloadHelper.canDownloadFile(fileMetadata) or !thumbnailServiceWrapper.isThumbnailAvailable(fileMetadata.dataFile)}"/>
+                <span class="file-thumbnail-preview-img" jsf:rendered="#{fileDownloadHelper.canDownloadFile(fileMetadata) and thumbnailServiceWrapper.isThumbnailAvailable(fileMetadata.dataFile)}"
                       data-toggle="popover" data-placement="auto top" data-trigger="hover" data-html="true" data-content="&lt;img src=&#34;/api/access/datafile/#{fileMetadata.dataFile.id}?imageThumb=400&#34; alt=&#34;#{bundle['file.preview']} #{fileMetadata.label}&#34; /&gt;"
                       data-template='&lt;div class="popover thumb-preview" role="tooltip"&gt;&lt;div class="arrow"&gt;&lt;/div&gt;&lt;h3 class="popover-title"&gt;&lt;/h3&gt;&lt;div class="popover-content"&gt;&lt;/div&gt;&lt;/div&gt;'>
                     <p:graphicImage value="/api/access/datafile/#{fileMetadata.dataFile.id}?imageThumb=true" alt="#{fileMetadata.label}"/>
@@ -47,7 +47,7 @@
             <div class="fileNameOriginal">
                 <ui:fragment rendered="#{!editDatafilesPage}">
                     <!-- conditional render in file page links, if file has global id, use that; if not, we use database id -->
-                    <a href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalIdString ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalIdString ? fileMetadata.dataFile.globalIdString : fileMetadata.dataFile.id).concat('&amp;version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber))}">
+                    <a href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalId ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalId ? fileMetadata.dataFile.globalId.asString() : fileMetadata.dataFile.id).concat('&amp;version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber))}">
                         #{fileMetadata.label}
                     </a>
                 </ui:fragment>
@@ -67,7 +67,7 @@
                 </div>
                 <div class="downloads-block" jsf:rendered="#{!editDatafilesPage and !(settingsWrapper.rsyncOnly) and fileMetadata.dataFile.released}">
                     <h:outputFormat styleClass="visible-xs-block visible-sm-block visible-md-inline visible-lg-inline" id="fileDownloadCount" value="{0} #{bundle['metrics.downloads']}">
-                        <f:param value="#{guestbookResponseServiceBean.getCountGuestbookResponsesByDataFileId(fileMetadata.dataFile.id)}"/>
+                        <f:param value="#{guestbookResponseServiceBean.getDownloadCountByDataFileId(fileMetadata.dataFile.id)}"/>
                     </h:outputFormat>
                 </div>
                 <!-- CHECKSUM -->
diff --git a/src/main/webapp/file-request-access-popup-fragment.xhtml b/src/main/webapp/file-request-access-popup-fragment.xhtml
deleted file mode 100644
index 6541d86b686..00000000000
--- a/src/main/webapp/file-request-access-popup-fragment.xhtml
+++ /dev/null
@@ -1,53 +0,0 @@
-<ui:composition xmlns="http://www.w3.org/1999/xhtml"
-                xmlns:h="http://java.sun.com/jsf/html"
-                xmlns:f="http://java.sun.com/jsf/core"
-                xmlns:ui="http://java.sun.com/jsf/facelets"
-                xmlns:c="http://java.sun.com/jsp/jstl/core"
-                xmlns:p="http://primefaces.org/ui"
-                xmlns:o="http://omnifaces.org/ui"
-                xmlns:jsf="http://xmlns.jcp.org/jsf"
-                xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
-
-        <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
-        <p class="help-block">
-            #{someActivelyEmbargoedFiles ? bundle['file.requestAccessTermsDialog.embargoed.tip'] : bundle['file.requestAccessTermsDialog.tip']}
-        </p>
-        <p class="help-block" jsf:rendered="#{someActivelyEmbargoedFiles}">
-            #{bundle['file.requestAccessTermsDialog.embargoed']}
-        </p>
-        <div class="form-horizontal">
-            <div class="form-group" jsf:rendered="#{!empty workingVersion.termsOfUseAndAccess.license and workingVersion.termsOfUseAndAccess.license.name != 'CC0' and !empty workingVersion.termsOfUseAndAccess.termsOfUse}">
-                <label class="col-sm-3 control-label">
-                    #{bundle['file.dataFilesTab.terms.list.termsOfUse.termsOfUse']}
-                </label>
-                <div class="col-sm-6">
-                    <div class="panel panel-default">
-                        <div class="panel-body read-terms">
-                            <h:outputText value="#{MarkupChecker:sanitizeBasicHTML(workingVersion.termsOfUseAndAccess.termsOfUse)}" escape="false" />
-                        </div>
-                    </div>
-                </div>
-            </div>
-            <div class="form-group" jsf:rendered="#{!empty workingVersion.termsOfUseAndAccess.termsOfAccess}">
-                <label class="col-sm-3 control-label">
-                    #{bundle['file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess']}
-                </label>
-                <div class="col-sm-6">
-                    <div class="panel panel-default">
-                        <div class="panel-body read-terms">
-                            <h:outputText value="#{MarkupChecker:sanitizeBasicHTML(workingVersion.termsOfUseAndAccess.termsOfAccess)}" escape="false" />
-                        </div>
-                    </div>
-                </div>
-            </div>
-        </div>
-        <div class="button-block">
-            <p:commandButton styleClass="btn btn-default" value="#{bundle.acceptTerms}" 
-                           action="#{fileDownloadHelper.requestAccessIndirect()}"    
-                           update="@([id$=requestAccessConsolidated]), @([id$=requestPanel]), @([id$=messagePanel]) @([id$=filesTable])" oncomplete="PF('requestAccessPopup').hide();">
-            </p:commandButton>
-            <button class="btn btn-link" onclick="PF('requestAccessPopup').hide();PF('blockDatasetForm').hide();" type="button">
-                #{bundle.cancel}
-            </button>
-        </div>
-</ui:composition>
diff --git a/src/main/webapp/file-versions.xhtml b/src/main/webapp/file-versions.xhtml
index f4d6932485c..f7f259ce2e0 100644
--- a/src/main/webapp/file-versions.xhtml
+++ b/src/main/webapp/file-versions.xhtml
@@ -11,7 +11,7 @@
 <ui:fragment rendered="#{widgetWrapper.widgetView}">
     <p class="help-block">
         <h:outputFormat value="#{bundle['file.dataFilesTab.versions.widget.viewMoreInfo']}" escape="false">
-            <f:param value="#{FilePage.fileMetadata.datasetVersion.dataset.globalIdString}"/>
+            <f:param value="#{FilePage.fileMetadata.datasetVersion.dataset.globalId.asString()}"/>
             <f:param value="#{FilePage.fileMetadata.datasetVersion.dataset.displayName}"/>
             <f:param value="#{dataverseServiceBean.findRootDataverse().displayName}"/>
         </h:outputFormat>
diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index 6196780aa82..f69b5c35afd 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -29,6 +29,7 @@
                 <f:metadata>
                     <o:viewParam name="fileId" value="#{FilePage.fileId}"/>
                     <f:viewParam name="version" value="#{FilePage.version}"/>
+                    <o:viewParam name="toolType" value="#{FilePage.toolType}"/>
                     <f:viewParam name="persistentId" value="#{FilePage.persistentId}"/>
                     <f:viewParam name="datasetVersionId" value="#{FilePage.datasetVersionId}"/>
                     <f:viewAction action="#{dataverseSession.updateLocaleInViewRoot}"/>
@@ -75,6 +76,7 @@
                                         <h:outputText value="#{bundle['dataset.versionUI.unpublished']}" styleClass="label label-warning" rendered="#{!FilePage.fileMetadata.datasetVersion.dataset.released}"/>   
                                         <h:outputText value="#{bundle['dataset.versionUI.deaccessioned']}" styleClass="label label-danger" rendered="#{FilePage.fileMetadata.datasetVersion.deaccessioned}"/>
                                         <h:outputText value="#{FilePage.fileMetadata.datasetVersion.externalStatusLabel}" styleClass="label label-info" rendered="#{FilePage.fileMetadata.datasetVersion.externalStatusLabel!=null  and FilePage.canPublishDataset()}"/>
+                                        <h:outputText value="#{bundle['incomplete']}" styleClass="label label-danger" rendered="#{FilePage.fileMetadata.datasetVersion.draft and !FilePage.fileMetadata.datasetVersion.valid}"/>
                                         <!-- DATASET VERSION NUMBER -->
                                         <h:outputText styleClass="label label-default" rendered="#{FilePage.fileMetadata.datasetVersion.released and !(FilePage.fileMetadata.datasetVersion.draft or FilePage.fileMetadata.datasetVersion.inReview)}"
                                                       value="#{bundle['file.DatasetVersion']} #{FilePage.fileMetadata.datasetVersion.versionNumber}.#{FilePage.fileMetadata.datasetVersion.minorVersionNumber}"/> 
@@ -108,19 +110,22 @@
                                                     </button>
                                                     <ul class="dropdown-menu">
                                                         <li>
-                                                            <a jsf:id="endNoteLink-2" jsf:action="#{FilePage.fileDownloadService.downloadCitationXML(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}" >
-                                                                #{bundle['dataset.cite.downloadBtn.xml']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="endNoteLink-2" value="#{bundle['dataset.cite.downloadBtn.xml']}"
+                                                                action="#{FilePage.fileDownloadService.downloadCitationXML(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="risLink-2" jsf:actionListener="#{FilePage.fileDownloadService.downloadCitationRIS(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}">
-                                                                #{bundle['dataset.cite.downloadBtn.ris']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="risLink-2" value="#{bundle['dataset.cite.downloadBtn.ris']}"
+                                                                action="#{FilePage.fileDownloadService.downloadCitationRIS(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="bibLink-2" jsf:actionListener="#{FilePage.fileDownloadService.downloadCitationBibtex(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}" target="_blank">
-                                                                #{bundle['dataset.cite.downloadBtn.bib']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="bibLink-2" value="#{bundle['dataset.cite.downloadBtn.bib']}" target="_blank"
+                                                                action="#{FilePage.fileDownloadService.downloadCitationBibtex(FilePage.fileMetadata, null, FilePage.fileMetadata.dataFile.isIdentifierRegistered())}"
+                                                            />
                                                         </li>
                                                     </ul>
                                                 </div>
@@ -154,19 +159,22 @@
                                                     </button>
                                                     <ul class="dropdown-menu">
                                                         <li>
-                                                            <a jsf:id="endNoteLink" jsf:action="#{FilePage.fileDownloadService.downloadDatasetCitationXML(FilePage.fileMetadata.datasetVersion.dataset)}" >
-                                                                #{bundle['dataset.cite.downloadBtn.xml']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="endNoteLink" value="#{bundle['dataset.cite.downloadBtn.xml']}"
+                                                                action="#{FilePage.fileDownloadService.downloadDatasetCitationXML(FilePage.fileMetadata.datasetVersion.dataset)}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="risLink" jsf:actionListener="#{FilePage.fileDownloadService.downloadDatasetCitationRIS(FilePage.fileMetadata.datasetVersion.dataset)}">
-                                                                #{bundle['dataset.cite.downloadBtn.ris']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="risLink" value="#{bundle['dataset.cite.downloadBtn.ris']}"
+                                                                action="#{FilePage.fileDownloadService.downloadDatasetCitationRIS(FilePage.fileMetadata.datasetVersion.dataset)}"
+                                                            />
                                                         </li>
                                                         <li>
-                                                            <a jsf:id="bibLink" jsf:actionListener="#{FilePage.fileDownloadService.downloadDatasetCitationBibtex(FilePage.fileMetadata.datasetVersion.dataset)}" target="_blank">
-                                                                #{bundle['dataset.cite.downloadBtn.bib']}
-                                                            </a>
+                                                            <h:commandLink
+                                                                id="bibLink" value="#{bundle['dataset.cite.downloadBtn.bib']}" target="_blank"
+                                                                action="#{FilePage.fileDownloadService.downloadDatasetCitationBibtex(FilePage.fileMetadata.datasetVersion.dataset)}"
+                                                            />
                                                         </li>
                                                     </ul>
                                                 </div>
@@ -196,7 +204,7 @@
                                                                                    or FilePage.fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload}">
                                                             <ui:include src="file-download-button-fragment.xhtml">
                                                                 <ui:param name="fileMetadata" value="#{FilePage.fileMetadata}"/>
-                                                                <ui:param name="downloadPopupRequired" value="#{FilePage.downloadPopupRequired}"/>
+                                                                <ui:param name="guestbookAndTermsPopupRequired" value="#{FilePage.guestbookAndTermsPopupRequired}"/>
                                                                 <ui:param name="requestAccessPopupRequired" value="#{FilePage.requestAccessPopupRequired}"/>
                                                                 <ui:param name="guestbookResponse" value="#{FilePage.guestbookResponse}"/>
                                                                 <ui:param name="guestbookResponseService" value="#{FilePage.guestbookResponseService}"/>
@@ -204,7 +212,9 @@
                                                                 <ui:param name="isFilePg" value="true"/>
                                                                 <ui:param name="lockedFromDownload" value="#{FilePage.lockedFromDownload}"/>
                                                                 <ui:param name="exploreTools" value="#{FilePage.exploreTools}"/>
+                                                                <ui:param name="queryTools" value="#{FilePage.queryTools}"/>
                                                                 <ui:param name="anonymized" value="#{FilePage.anonymizedAccess}"/>
+                                                                <ui:param name="bean" value="#{FilePage}"/>
                                                             </ui:include>
                                                         </ui:fragment>
                                                         <!-- END: Explore/Download/Request Button Block -->
@@ -283,11 +293,12 @@
                                                     data-trigger="focus" data-html="true" data-content="#{bundle['metrics.file.tip.makedatacount']}"></a>
                                             </ui:fragment>
                                         </div>
+                                        <!-- END DATASET CITATION -->
                                         <div id="metrics-body">
                                             <!-- Classic downloads -->
                                             <div class="metrics-count-block" jsf:rendered="#{!settingsWrapper.makeDataCountDisplayEnabled}">
                                                 <h:outputFormat value="{0} #{bundle['metrics.downloads']}">
-                                                    <f:param value="#{guestbookResponseServiceBean.getCountGuestbookResponsesByDataFileId(FilePage.fileId)}"/>
+                                                    <f:param value="#{guestbookResponseServiceBean.getDownloadCountByDataFileId(FilePage.fileId)}"/>
                                                 </h:outputFormat>
                                                 <span class="glyphicon glyphicon-question-sign tooltip-icon"
                                                       data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.file.downloads.tip']}"></span>
@@ -295,10 +306,10 @@
                                             <!-- Make Data Count downloads -->
                                             <div class="metrics-count-block" jsf:rendered="#{settingsWrapper.makeDataCountDisplayEnabled}">
                                                 <h:outputFormat value="{0} #{bundle['metrics.downloads']}">
-                                                    <f:param value="#{guestbookResponseServiceBean.getCountGuestbookResponsesByDataFileId(FilePage.fileId)}"/>
+                                                    <f:param value="#{guestbookResponseServiceBean.getDownloadCountByDataFileId(FilePage.fileId)}"/>
                                                 </h:outputFormat>
                                                 <span class="glyphicon glyphicon-question-sign tooltip-icon"
-                                                        data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.file.downloads.tip']}"></span>
+                                                        data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['metrics.file.downloads.nonmdc.tip']}"></span>
                                             </div>
                                         </div>
                                     </div>
@@ -340,31 +351,35 @@
                                     <ui:param name="fileDownloadService" value="#{FilePage.fileDownloadService}"/>
                                 </ui:include>
                             </p:tab>
-                            <p:tab id="previewTab" title="#{bundle['file.previewTab.header']}" 
-                                   rendered="#{FilePage.toolsWithPreviews.size() > 0 and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
+                            <p:tab id="previewTab" title="#{FilePage.toolTabTitle}" 
+                                   rendered="#{(FilePage.toolsWithPreviews.size() > 0 or FilePage.queryTools.size() > 0) and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
                                 <!-- PREVIEW TERMS/GUESTBOOK FORM -->
-                                <ui:fragment rendered="#{FilePage.downloadPopupRequired and !FilePage.termsMet}">
-                                    <ui:include src="file-download-popup-fragment.xhtml">
+                                <ui:fragment rendered="#{FilePage.guestbookAndTermsPopupRequired and !FilePage.termsMet and (FilePage.getSelectedTool().previewTool or FilePage.getSelectedTool().queryTool)}">
+                                    <ui:include id="previewGB" src="guestbook-terms-popup-fragment.xhtml">
                                         <ui:param name="popupContext" value="previewTab"/>
                                         <ui:param name="workingVersion" value="#{FilePage.fileMetadata.datasetVersion}"/>
-                                        <ui:param name="downloadPopupRequired" value="#{FilePage.downloadPopupRequired}"/>
+                                        <ui:param name="guestbookAndTermsPopupRequired" value="#{FilePage.guestbookAndTermsPopupRequired}"/>
                                         <ui:param name="hasRestrictedFile" value="#{FilePage.fileMetadata.restricted}"/>
                                         <ui:param name="guestbookResponse" value="#{FilePage.guestbookResponse}"/>
                                         <ui:param name="guestbookResponseService" value="#{FilePage.guestbookResponseService}"/>
                                         <ui:param name="fileDownloadService" value="#{FilePage.fileDownloadService}"/>
                                         <ui:param name="lockedFromDownload" value="#{FilePage.lockedFromDownload}"/>
+                                        <ui:param name="termsGuestbookPopupAction" value="#{bundle.download}"/>
+                                        <ui:param name="guestbookPopupRequiredAtDownload" value="#{FilePage.guestbookPopupRequiredAtDownload}"/>
                                     </ui:include>
                                 </ui:fragment>
                                 <!-- PREVIEW EXTERNAL TOOL -->
-                                <ui:fragment rendered="#{(!FilePage.downloadPopupRequired) or (FilePage.downloadPopupRequired and FilePage.termsMet)}">
+                                <ui:fragment rendered="#{(!FilePage.guestbookAndTermsPopupRequired) or FilePage.termsMet}">
                                 <div class="btn-toolbar margin-bottom" role="toolbar" aria-label="#{bundle['file.previewTab.button.label']}">
                                     <!-- Preview Button Group -->
-                                    <div class="btn-group" jsf:rendered="#{FilePage.toolsWithPreviews.size() > 1 and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
+                                    <div class="btn-group" jsf:rendered="#{FilePage.allAvailableTools.size() > 1 and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
                                         <button type="button" id="selectTool" class="btn btn-default dropdown-toggle" data-toggle="dropdown">
-                                            <span class="glyphicon glyphicon-eye-open"/> #{bundle['file.previewTab.button.label']} <span class="caret"></span>
+                                             #{bundle['file.toolsTab.button.label']} <span class="caret"></span>
                                         </button>
+                                        
+                                        <ui:fragment rendered="#{FilePage.allAvailableTools.size() > 1}">
                                         <ul class="dropdown-menu">
-                                            <ui:repeat value="#{FilePage.toolsWithPreviews}" var="tool">
+                                            <ui:repeat value="#{FilePage.allAvailableTools}" var="tool">
                                                 <li>
                                                     <h:commandLink action="#{FilePage.setSelectedTool(tool)}">
                                                         <h:outputText value="#{tool.getDisplayNameLang()}"/>
@@ -372,11 +387,14 @@
                                                 </li>
                                             </ui:repeat>
                                         </ul>
+                                        </ui:fragment>
                                     </div>
                                     <!-- END: Preview Button Group -->
-                                    <div class="btn-group" jsf:rendered="#{FilePage.toolsWithPreviews.size() > 0 and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
+                                    <div class="btn-group" jsf:rendered="#{(FilePage.toolsWithPreviews.size() > 0 or FilePage.queryTools.size() > 0) and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
                                         <!-- Modular/Configured Explore Tool -->
-                                        <h:commandLink rendered="#{!downloadPopupRequired}" 
+                                        <ui:remove><!-- Not sure this div will ever be rendered when terms are required and FilePage.termsMet isn't true. Leaving this logic in, in case that is incorrect or changes going forward. --></ui:remove>
+                            
+                                        <h:commandLink rendered="#{(!FilePage.guestbookAndTermsPopupRequired) or FilePage.termsMet}" 
                                                          type="submit"
                                                          styleClass="btn btn-default #{(FilePage.fileMetadata.dataFile.ingestInProgress) ? 'disabled' : ''}"
                                                          disabled="#{(FilePage.fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
@@ -387,14 +405,15 @@
                                                 <f:param value="#{FilePage.selectedTool.getDisplayNameLang()}"/>
                                             </h:outputFormat>
                                         </h:commandLink>
-                                        <p:commandLink rendered="#{downloadPopupRequired}"
+                                        <p:commandLink rendered="#{FilePage.guestbookAndTermsPopupRequired and !FilePage.termsMet}"
                                                          action="#{FilePage.guestbookResponseService.modifyDatafileAndFormat(FilePage.guestbookResponse, FilePage.fileMetadata, 'externalTool', FilePage.selectedTool)}"
                                                          styleClass="btn btn-default"
                                                          disabled="#{(FilePage.fileMetadata.dataFile.ingestInProgress or lockedFromDownload) ? 'disabled' : ''}"
                                                          type="submit"
                                                          process="@this"
-                                                         update="@widgetVar(downloadPopup)"
-                                                         oncomplete="PF('downloadPopup').show();handleResizeDialog('downloadPopup');">
+                                                         update="@widgetVar(guestbookAndTermsPopup)"
+                                                         oncomplete="PF('guestbookAndTermsPopup').show();handleResizeDialog('guestbookAndTermsPopup');">
+                                            <f:actionListener binding="#{FilePage.setTermsGuestbookPopupAction(bundle.download)}"/>
                                             <span class="glyphicon glyphicon-#{FilePage.selectedTool.exploreTool ? 'equalizer' : 'new-window'}"></span>
                                             <h:outputFormat value="#{FilePage.selectedTool.exploreTool ? bundle['file.previewTab.exploreBtn'] : bundle['file.previewTab.openBtn']}">
                                                 <f:param value="#{bundle.explore}"/>
@@ -404,7 +423,7 @@
                                     </div>
                                 </div>
                                 <!-- FRAME EXTERNAL TOOL EMBED -->
-                                <div id="previewPresentation" class="embed-responsive embed-responsive-16by9" jsf:rendered="#{FilePage.toolsWithPreviews.size() > 0 and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
+                                <div id="previewPresentation" class="embed-responsive embed-responsive-16by9" jsf:rendered="#{(FilePage.toolsWithPreviews.size() > 0  or FilePage.queryTools.size() > 0) and fileDownloadHelper.canDownloadFile(FilePage.fileMetadata)}">
                                     <iframe role="presentation" title="#{bundle['file.previewTab.presentation']}" src="#{FilePage.preview(FilePage.selectedTool)}"></iframe>
                                 </div>
                                 </ui:fragment>
@@ -609,53 +628,38 @@
                     </ui:include> 
                     <p:dialog id="shareDialog" header="#{bundle['file.share.title']}" widgetVar="shareDialog" modal="true" rendered="#{!FilePage.fileMetadata.datasetVersion.deaccessioned}">
                         <p class="help-block">#{bundle['file.share.tip']}</p>
-                        <div id="sharrre-widget" data-text="#{bundle['file.share.text']}" data-url="#{systemConfig.dataverseSiteUrl}/file.xhtml?#{empty FilePage.fileMetadata.dataFile.globalIdString ? 'fileId' : 'persistentId'}=#{empty FilePage.fileMetadata.dataFile.globalIdString ? FilePage.fileMetadata.dataFile.id : FilePage.fileMetadata.dataFile.globalIdString}&amp;version=#{FilePage.fileMetadata.datasetVersion.friendlyVersionNumber}"></div>
+                        <div id="sharrre-widget" data-text="#{bundle['file.share.text']}" data-url="#{systemConfig.dataverseSiteUrl}/file.xhtml?#{empty FilePage.fileMetadata.dataFile.globalId ? 'fileId' : 'persistentId'}=#{empty FilePage.fileMetadata.dataFile.globalId ? FilePage.fileMetadata.dataFile.id : FilePage.fileMetadata.dataFile.globalId.asString()}&amp;version=#{FilePage.fileMetadata.datasetVersion.friendlyVersionNumber}"></div>
                         <div class="button-block">
                             <button class="btn btn-default" onclick="PF('shareDialog').hide()" type="button">
                                 #{bundle.close}
                             </button>
                         </div>
                     </p:dialog>                                                              
-                    <p:dialog id="downloadPopup" styleClass="largePopUp" header="#{bundle['file.downloadDialog.header']}" widgetVar="downloadPopup" modal="true">
-                         <ui:include src="file-download-popup-fragment.xhtml">
-                            <ui:param name="popupContext" value="downloadFile"/>
-                            <ui:param name="workingVersion" value="#{FilePage.fileMetadata.datasetVersion}"/>
-                            <ui:param name="downloadPopupRequired" value="#{FilePage.downloadPopupRequired}"/>
-                            <ui:param name="hasRestrictedFile" value="#{FilePage.fileMetadata.restricted}"/>
-                            <ui:param name="guestbookResponse" value="#{FilePage.guestbookResponse}"/>
-                            <ui:param name="guestbookResponseService" value="#{FilePage.guestbookResponseService}"/>
-                            <ui:param name="fileDownloadService" value="#{FilePage.fileDownloadService}"/>
-                            <ui:param name="lockedFromDownload" value="#{FilePage.lockedFromDownload}"/>
-                        </ui:include>
-                    </p:dialog>
+
                     <p:dialog id="downloadPackagePopup" styleClass="smallPopUp" header="#{bundle['packageDownload.title']}" widgetVar="downloadPackagePopup" modal="true">
                          <ui:include src="package-download-popup-fragment.xhtml">
                             <ui:param name="workingVersion" value="#{FilePage.fileMetadata.datasetVersion}"/>
-                            <ui:param name="downloadPopupRequired" value="#{FilePage.downloadPopupRequired}"/>
+                            <ui:param name="guestbookAndTermsPopupRequired" value="#{FilePage.guestbookAndTermsPopupRequired}"/>
                             <ui:param name="guestbookResponse" value="#{FilePage.guestbookResponse}"/>
                             <ui:param name="guestbookResponseService" value="#{FilePage.guestbookResponseService}"/>
                             <ui:param name="fileDownloadService" value="#{FilePage.fileDownloadService}"/>
                             <ui:param name="lockedFromDownload" value="#{FilePage.lockedFromDownload}"/>
                         </ui:include>
                     </p:dialog>                  
-                    <p:dialog id="requestAccessPopup" styleClass="largePopUp" header="#{bundle['file.requestAccess']}" widgetVar="requestAccessPopup" modal="true">
+                    <p:dialog id="guestbookAndTermsPopup" styleClass="largePopUp" header="#{bundle['file.downloadDialog.header']}" widgetVar="guestbookAndTermsPopup" modal="true">
                         <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
-                        <ui:include src="file-request-access-popup-fragment.xhtml">
+                        <ui:include src="guestbook-terms-popup-fragment.xhtml">
                             <ui:param name="workingVersion" value="#{FilePage.fileMetadata.datasetVersion}"/>
                             <ui:param name="someActivelyEmbargoedFiles" value="#{FilePage.cantRequestDueToEmbargo}"/>
+                            <ui:param name="guestbookAndTermsPopupRequired" value="#{FilePage.guestbookAndTermsPopupRequired}"/>
+                            <ui:param name="guestbookResponse" value="#{FilePage.guestbookResponse}"/>
+                            <ui:param name="fileDownloadHelper" value="#{FilePage.fileDownloadHelper}"/>
                             <ui:param name="fileDownloadService" value="#{FilePage.fileDownloadService}"/>
+                            <ui:param name="termsGuestbookPopupAction" value="#{FilePage.termsGuestbookPopupAction}"/>
+                            <ui:param name="guestbookPopupRequiredAtDownload" value="#{FilePage.guestbookPopupRequiredAtDownload}"/>
+                            
                         </ui:include>
-                    </p:dialog>                    
-                    <p:dialog id="mapDataDialog" header="#{bundle['file.mapData.unpublished.header']}" widgetVar="mapDataDialog" modal="true">
-                        <p class="help-block">
-                            <span class="text-danger"><span class="glyphicon glyphicon-exclamation-sign"/> #{bundle['file.mapData.unpublished.message']}</span>
-                        </p>
-                        <div class="button-block">
-                            <button type="button" class="btn btn-default" onclick="PF('mapDataDialog').hide();PF('blockDatasetForm').hide();">
-                                #{bundle.close}
-                            </button>
-                        </div>
-                    </p:dialog>                    
+                    </p:dialog>
                     <ui:include rendered="#{systemConfig.provCollectionEnabled}" src="provenance-popups-fragment.xhtml">
                         <ui:param name="saveInPopup" value="true"/>
                     </ui:include>
@@ -670,7 +674,7 @@
                             </h:outputFormat>
                         </p>
                         <div class="button-block">
-                            <button class="btn btn-default" onclick="PF('accessSignUpLogIn_popup').hide();PF('blockDatasetForm').hide();" type="button">
+                            <button class="btn btn-default" onclick="PF('accessSignUpLogIn_popup').hide();" type="button">
                                 #{bundle.close}
                             </button>
                         </div>
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index 1370b308137..58899ab7062 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -108,7 +108,7 @@
                         <p:remoteCommand name="submitsearch" action="#{DatasetPage.updateFileSearch()}" process="@this @widgetVar(inputSearchTerm)" update="@form" partialSubmit="true"/>
 
                         <span class="input-group-btn">
-                            <p:commandLink title="#{bundle['dataverse.search.btn.find']}" styleClass="btn btn-default bootstrap-button-tooltip" action="#{DatasetPage.updateFileSearch()}" process="@this @widgetVar(inputSearchTerm)" update="@form" partialSubmit="true">
+                            <p:commandLink title="#{bundle['dataverse.search.btn.find']}" styleClass="btn btn-default bootstrap-button-tooltip" action="#{DatasetPage.updateFileSearch()}" process="@this @widgetVar(inputSearchTerm)" update="@form" partialSubmit="true"  ariaLabel="#{bundle['dataverse.search.btn.find']}">
                                 <span class="glyphicon glyphicon-search no-text"/>
                             </p:commandLink>
                         </span>
@@ -134,7 +134,7 @@
 
             <!-- FILES FILTER FACETS -->
             <div class="row" style="margin-bottom:10px;" jsf:rendered="#{(DatasetPage.workingVersion.fileMetadatas.size() gt 1)}">
-                <div class="col-xs-9 text-left" jsf:rendered="#{DatasetPage.indexedVersion}">
+                <div class="col-xs-7 text-left" jsf:rendered="#{DatasetPage.indexedVersion}">
                     <div class="text-muted small" style="font-weight:normal;" jsf:rendered="#{(DatasetPage.fileMetadatasSearch.size() gt 0)}">#{bundle['file.results.filter']}</div>
                     <!-- FACET: TYPE -->
                     <div class="btn-group" style="margin-right:20px;" jsf:rendered="#{!(empty DatasetPage.fileTypeFacetLabels)}">
@@ -144,7 +144,7 @@
                         </button>
                         <ul class="dropdown-menu">
                             <!-- link for "All" - all file types: -->
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{empty DatasetPage.fileTypeFacet ? 'highlightBold' : ''}" value="#{bundle['file.results.filter.all']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -153,10 +153,12 @@
                                     <f:param name="fileTag" value="#{DatasetPage.fileTagsFacet}"/>
                                     <f:param name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                                     <f:param name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                    <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                 </h:outputLink></li>
                             <li role="separator" class="divider"></li>
                             <ui:repeat value="#{DatasetPage.fileTypeFacetLabels}" var="facetLabel">
-                                <li><h:outputLink value="/dataset.xhtml">
+                                <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                         <h:outputFormat styleClass="#{facetLabel.name == DatasetPage.fileTypeFacet ? 'highlightBold' : ''}" value="#{facetLabel.name} &#40;{0}&#41;">
                                             <f:param value="#{facetLabel.count}"/>
                                         </h:outputFormat>
@@ -168,6 +170,8 @@
                                         <f:param name="fileTag" value="#{DatasetPage.fileTagsFacet}"/>
                                         <f:param name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                                         <f:param name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                                        <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                        <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                     </h:outputLink></li>
                             </ui:repeat>
                         </ul>
@@ -181,7 +185,7 @@
                         </button>
                         <ul class="dropdown-menu">
                             <!-- link for "All" - i.e., all (both) types of access: -->
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{empty DatasetPage.fileAccessFacet ? 'highlightBold' : ''}" value="#{bundle['file.results.filter.all']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -190,10 +194,12 @@
                                     <f:param name="fileTag" value="#{DatasetPage.fileTagsFacet}"/>
                                     <f:param name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                                     <f:param name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                    <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                 </h:outputLink></li>
                             <li role="separator" class="divider"></li>
                             <ui:repeat value="#{DatasetPage.fileAccessFacetLabels}" var="facetLabel">
-                                <li><h:outputLink value="/dataset.xhtml">
+                                <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                         <h:outputFormat styleClass="#{facetLabel.name == DatasetPage.fileAccessFacet ? 'highlightBold' : ''}" value="#{facetLabel.name} &#40;{0}&#41;">
                                             <f:param value="#{facetLabel.count}"/>
                                         </h:outputFormat>
@@ -205,6 +211,8 @@
                                         <f:param name="fileTag" value="#{DatasetPage.fileTagsFacet}"/>
                                         <f:param name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                                         <f:param name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                                        <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                        <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                     </h:outputLink></li>
                             </ui:repeat>
                         </ul>
@@ -218,7 +226,7 @@
                         </button>
                         <ul class="dropdown-menu">
                             <!-- link for "All" - i.e., all file tags: -->
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{empty DatasetPage.fileTagsFacet ? 'highlightBold' : ''}" value="#{bundle['file.results.filter.all']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -227,10 +235,12 @@
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
                                     <f:param name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                                     <f:param name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                    <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                 </h:outputLink></li>
                             <li role="separator" class="divider"></li>
                             <ui:repeat value="#{DatasetPage.fileTagsFacetLabels}" var="facetLabel">
-                                <li><h:outputLink value="/dataset.xhtml">
+                                <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                         <h:outputFormat styleClass="#{facetLabel.name == DatasetPage.fileTagsFacet ? 'highlightBold' : ''}" value="#{facetLabel.name} &#40;{0}&#41;">
                                             <f:param value="#{facetLabel.count}"/>
                                         </h:outputFormat>
@@ -242,28 +252,43 @@
                                         <f:param name="fileTag" value='"#{facetLabel.name}"'/>
                                         <f:param name="fileSortField" value="#{DatasetPage.fileSortField}"/>
                                         <f:param name="fileSortOrder" value="#{DatasetPage.fileSortOrder}"/>
+                                        <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                        <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                     </h:outputLink></li>
                             </ui:repeat>
                         </ul>
                     </div>
                 </div>
 
-                <div class="col-xs-3 text-right #{DatasetPage.indexedVersion ? '' : 'col-xs-offset-9'}">
+                    
+
+                
+                <div class="file-sort col-xs-5 text-right #{DatasetPage.indexedVersion ? '' : 'col-xs-offset-7'}">
+                    <div class="file-group-by col-xs-7 text-left text-muted small" style="font-weight:normal;" jsf:rendered="#{DatasetPage.allowUserManagementOfOrder()}">
+                        <p:selectBooleanCheckbox id="fps" value="#{DatasetPage.folderPresort}" itemLabel="#{bundle['file.results.presort.folder']}" title="#{bundle['file.results.presort.folder.desc']}" rendered="#{DatasetPage.orderByFolder()}">
+                            <p:ajax update="filesTable, messagePanel" process="fps, tps" listener="#{DatasetPage.sort()}"/>
+                        </p:selectBooleanCheckbox>
+                        <p:selectBooleanCheckbox id="tps" value="#{DatasetPage.tagPresort}" itemLabel="#{bundle['file.results.presort.tag']}" title="#{bundle['file.results.presort.tag.desc']}" rendered="#{null != DatasetPage.getSortOrder()}">
+                            <p:ajax update="filesTable, messagePanel" process="fps, tps" listener="#{DatasetPage.sort()}"/>
+                        </p:selectBooleanCheckbox>
+                    </div>
                     <!-- FILES SORT -->
                     <div class="btn-group">
                         <button type="button" class="btn btn-default dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
                             <span class="glyphicon glyphicon-sort"></span> #{bundle['file.results.btn.sort']} <span class="caret"></span>
                         </button>
                         <ul class="dropdown-menu pull-right text-left">
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{((empty DatasetPage.fileSortField) or DatasetPage.fileSortField == 'name') and (empty DatasetPage.fileSortOrder) ? 'highlightBold' : ''}" value="#{bundle['file.results.btn.sort.option.nameAZ']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
                                     <f:param name="q" value="#{DatasetPage.fileLabelSearchTerm}"/>
                                     <f:param name="fileTypeGroupFacet" value="#{DatasetPage.fileTypeFacet}"/>
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                    <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                 </h:outputLink></li>
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{(DatasetPage.fileSortField == 'name' and DatasetPage.fileSortOrder == 'desc') ? 'highlightBold' : ''}" value="#{bundle['file.results.btn.sort.option.nameZA']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -272,8 +297,10 @@
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
                                     <f:param name="fileSortField" value="name"/>
                                     <f:param name="fileSortOrder" value="desc"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
+                                    <f:param name="folderPresort" value="#{DatasetPage.folderPresort}"/>
                                 </h:outputLink></li>
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{(DatasetPage.fileSortField == 'date' and (empty DatasetPage.fileSortOrder)) ? 'highlightBold' : ''}" value="#{bundle['file.results.btn.sort.option.newest']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -281,8 +308,9 @@
                                     <f:param name="fileTypeGroupFacet" value="#{DatasetPage.fileTypeFacet}"/>
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
                                     <f:param name="fileSortField" value="date"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
                                 </h:outputLink></li>
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{(DatasetPage.fileSortField == 'date' and DatasetPage.fileSortOrder == 'desc') ? 'highlightBold' : ''}" value="#{bundle['file.results.btn.sort.option.oldest']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -291,8 +319,9 @@
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
                                     <f:param name="fileSortField" value="date"/>
                                     <f:param name="fileSortOrder" value="desc"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
                                 </h:outputLink></li>
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{DatasetPage.fileSortField == 'size' ? 'highlightBold' : ''}" value="#{bundle['file.results.btn.sort.option.size']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -300,8 +329,9 @@
                                     <f:param name="fileTypeGroupFacet" value="#{DatasetPage.fileTypeFacet}"/>
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
                                     <f:param name="fileSortField" value="size"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
                                 </h:outputLink></li>
-                            <li><h:outputLink value="/dataset.xhtml">
+                            <li><h:outputLink rel="nofollow" value="/dataset.xhtml">
                                     <h:outputText styleClass="#{DatasetPage.fileSortField == 'type' ? 'highlightBold' : ''}" value="#{bundle['file.results.btn.sort.option.type']}"/>
                                     <f:param name="persistentId" value="#{DatasetPage.persistentId}"/>
                                     <f:param name="version" value="#{DatasetPage.version}"/>
@@ -309,6 +339,7 @@
                                     <f:param name="fileTypeGroupFacet" value="#{DatasetPage.fileTypeFacet}"/>
                                     <f:param name="fileAccess" value="#{DatasetPage.fileAccessFacet}"/>
                                     <f:param name="fileSortField" value="type"/>
+                                    <f:param name="tagPresort" value="#{DatasetPage.tagPresort}"/>
                                 </h:outputLink></li>
                         </ul>
                     </div>
@@ -390,7 +421,6 @@
                             <ui:param name="fileMetadataForAction" value="#{DatasetPage.fileMetadataForAction}"/>
                             <ui:param name="bean" value="#{DatasetPage}"/>
                             <ui:param name="unrestrictFileAction" value="restrictFiles"/>
-                            <ui:param name="editFileAction" value="editFileMetadata"/>
                             <ui:param name="refreshTagsPopoupAction" value="refreshTagsPopUp"/>                          
                         </ui:include>  
                     </ul>
@@ -406,53 +436,68 @@
                 <div jsf:id="downloadButtonBlockNormal" class="btn-group" 
                      jsf:rendered="#{(!(empty DatasetPage.workingVersion.fileMetadatas) 
                                      and DatasetPage.workingVersion.fileMetadatas.size() > 1) and DatasetPage.downloadButtonAvailable
-                                     and !DatasetPage.isHasTabular()}">
+                                     and !(DatasetPage.isVersionHasTabular() || DatasetPage.isVersionHasGlobus())}">
                     <p:commandLink 
                                    styleClass="btn btn-default btn-download"
                                    disabled="#{false and DatasetPage.lockedFromDownload}"
                                    onclick="if (!testFilesSelected()) return false;" 
                                    action="#{DatasetPage.startDownloadSelectedOriginal()}"
-                                   update="@form" oncomplete="showPopup();">
+                                   update="@form" oncomplete="showPopup(false);">
+                        <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
                         <f:setPropertyActionListener target="#{DatasetPage.fileMetadataForAction}" value="#{null}"/>
                         <span class="glyphicon glyphicon-download-alt"/> #{bundle.download}
                     </p:commandLink>
                 </div>
-                <div jsf:id="downloadButtonBlockTabular" class="btn-group" 
+                <div jsf:id="downloadButtonBlockMultiple" class="btn-group" 
                      jsf:rendered="#{(!(empty DatasetPage.workingVersion.fileMetadatas) 
                                      and DatasetPage.workingVersion.fileMetadatas.size() > 1) and DatasetPage.downloadButtonAvailable
-                                     and DatasetPage.isHasTabular()}">
+                                     and (DatasetPage.isVersionHasTabular() || DatasetPage.isVersionHasGlobus())}">
                     <button type="button" class="btn btn-default btn-download dropdown-toggle" data-toggle="dropdown">  
                         <span class="glyphicon glyphicon-download-alt"/> #{bundle.download} <span class="caret"></span>
                     </button>
                     <ul jsf:id="downloadDropdownOptions" class="dropdown-menu multi-level pull-right text-left">
-                        <li >
+                        <li>
                             <p:commandLink
                                            disabled="#{false and DatasetPage.lockedFromDownload}"
-                                           update="@form" oncomplete="showPopup();"
+                                           update="@form" oncomplete="showPopup(false);"
                                            onclick="if (!testFilesSelected()) return false;" 
                                            actionListener="#{DatasetPage.startDownloadSelectedOriginal()}">
+                                <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
                                 <f:setPropertyActionListener target="#{DatasetPage.fileMetadataForAction}" value="#{null}"/>
                                 #{bundle.downloadOriginal}
                             </p:commandLink>
                         </li>
-                        <li> 
+                        <li jsf:rendered="#{DatasetPage.isVersionHasTabular()}">
                             <p:commandLink
                                            disabled="#{false and DatasetPage.lockedFromDownload}"
-                                           update="@form" oncomplete="showPopup();"
+                                           update="@form" oncomplete="showPopup(false);"
                                            onclick="if (!testFilesSelected()) return false;" 
                                            actionListener="#{DatasetPage.startDownloadSelectedArchival()}">
+                            <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
                             <f:setPropertyActionListener target="#{DatasetPage.fileMetadataForAction}" value="#{null}"/>
                                 #{bundle.downloadArchival}
                             </p:commandLink>
                         </li>
+                        <li  jsf:rendered="#{DatasetPage.isVersionHasGlobus()}"> 
+                            <p:commandLink
+                                           disabled="#{false and DatasetPage.lockedFromDownload}"
+                                           update="@form" oncomplete="showPopup(true);"
+                                           onclick="if (!testFilesSelected()) return false;" 
+                                           actionListener="#{DatasetPage.startGlobusTransfer(false, false)}">
+                            <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle.download)}"/>
+                            <f:setPropertyActionListener target="#{DatasetPage.fileMetadataForAction}" value="#{null}"/>
+                                #{bundle['file.globus.transfer']}
+                            </p:commandLink>
+                        </li>
                     </ul>
                 </div>
 
                 <p:commandLink rendered="#{DatasetPage.fileAccessRequestMultiButtonRequired}"
-                               styleClass="btn btn-default btn-request"                                   
-                               action="#{DatasetPage.requestAccessMultipleFiles()}"
-                               update="@form, @([id$=messagePanel])"
-                               disabled="#{DatasetPage.locked}">
+                               styleClass="btn btn-default btn-request"
+                               update="@form, @([id$=messagePanel])" 
+                               action="#{DatasetPage.validateFilesForRequestAccess()}"
+                               disabled="#{DatasetPage.locked or !DatasetPage.fileAccessRequestMultiButtonEnabled}">
+                    <f:actionListener binding="#{DatasetPage.setTermsGuestbookPopupAction(bundle['file.requestAccess'])}"/>
                     <span class="glyphicon glyphicon-bullhorn"/> #{bundle['file.requestAccess']}
                 </p:commandLink>
                 <p:commandLink rendered="#{DatasetPage.fileAccessRequestMultiSignUpButtonRequired}"
@@ -479,14 +524,16 @@
                        data-content="#{DatasetPage.ingestMessage} #{fileMetadata.dataFile.ingestReportMessage}"/>
                 </span>
             </div>
-            <div class="bg-info text-info text-center margin-bottom-half" jsf:rendered="#{fileMetadata.dataFile.fileAccessRequesters.contains(dataverseSession.user)}">
+            <div class="bg-info text-info text-center margin-bottom-half" jsf:rendered="#{fileMetadata.dataFile.containsActiveFileAccessRequestFromUser(dataverseSession.user)}">
                 <!-- Access Requested -->
                 <span class="glyphicon glyphicon-info-sign"/>
                 <span> #{bundle['file.accessRequested']}&#160;</span>
             </div>
             <div class="label label-info remote-info" jsf:rendered="#{not empty fileMetadata.dataFile.storageIO.remoteStoreName}" 
               title="#{bundle['file.remotelyStored']}">
-              <a jsf:rendered="#{not empty fileMetadata.dataFile.storageIO.remoteStoreUrl}" href="#{fileMetadata.dataFile.storageIO.remoteStoreUrl.toString()}" target="_blank" rel="noopener">#{fileMetadata.dataFile.storageIO.remoteStoreName}</a>
+              <div jsf:rendered="#{not empty fileMetadata.dataFile.storageIO.remoteStoreUrl}">
+                  <a href="#{fileMetadata.dataFile.storageIO.remoteStoreUrl.toString()}" target="_blank" rel="noopener">#{fileMetadata.dataFile.storageIO.remoteStoreName}</a>
+              </div>
               <span jsf:rendered="#{empty fileMetadata.dataFile.storageIO.remoteStoreUrl}">#{fileMetadata.dataFile.storageIO.remoteStoreName}</span>
             </div>
             <!-- END: FILE LEVEL MSGs -->
@@ -497,29 +544,38 @@
                     
                     <!-- TO-DO FIX RSYNC LOGIC !fileMetadata.dataFile.filePackage or
                                                     fileMetadata.dataFile.filePackage and systemConfig.HTTPDownload -->
+                     <ui:fragment rendered="#{DatasetPage.isShowQueryButton(fileMetadata.dataFile.id) and fileDownloadHelper.canDownloadFile(fileMetadata)}">
+                        <a class="btn-preview btn btn-link bootstrap-button-tooltip" title="#{DatasetPage.getQueryToolsForDataFile(fileMetadata.dataFile.id).get(0).getDisplayNameLang()}"
+                            href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalId ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalId ? fileMetadata.dataFile.globalId.asString() : fileMetadata.dataFile.id).concat('&amp;version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber)).concat('&amp;toolType=QUERY')}">
+                            <img src="#{resource['images/Robot-Icon_2.png']}"  width="32" height="32" /><span class="sr-only">#{bundle.preview} "#{empty(fileMetadata.directoryLabel) ? "":fileMetadata.directoryLabel.concat("/")}#{fileMetadata.label}"</span>
+                        </a>                       
+                     </ui:fragment>
+                                   
                     <ui:fragment rendered="#{DatasetPage.isShowPreviewButton(fileMetadata.dataFile.id) and fileDownloadHelper.canDownloadFile(fileMetadata)}">
                         <a class="btn-preview btn btn-link bootstrap-button-tooltip" title="#{DatasetPage.getPreviewToolsForDataFile(fileMetadata.dataFile.id).get(0).getDisplayNameLang()}"
-                            href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalIdString ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalIdString ? fileMetadata.dataFile.globalIdString : fileMetadata.dataFile.id).concat('&amp;version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber))}">
-                            <span class="glyphicon glyphicon-eye-open"/><span class="sr-only">#{bundle.preview} "#{empty(fileMetadata.directoryLabel) ? "":fileMetadata.directoryLabel.concat("/")}#{fileMetadata.label}"</span>
+                            href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty fileMetadata.dataFile.globalId ? 'persistentId=' : 'fileId=').concat(!empty fileMetadata.dataFile.globalId ? fileMetadata.dataFile.globalId.asString() : fileMetadata.dataFile.id).concat('&amp;version=').concat(fileMetadata.datasetVersion.friendlyVersionNumber)).concat('&amp;toolType=PREVIEW')}">
+                            <span style="margin-top: 10px;"  class="glyphicon glyphicon-eye-open"/><span class="sr-only">#{bundle.preview} "#{empty(fileMetadata.directoryLabel) ? "":fileMetadata.directoryLabel.concat("/")}#{fileMetadata.label}"</span>
                         </a>
                     </ui:fragment>
                     <a type="button" style="padding:6px 8px;" class="btn-access-file btn btn-link bootstrap-button-tooltip dropdown-toggle" 
                        title="#{bundle['file.accessBtn']}" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false" tabindex="0">
-                        <span class="glyphicon glyphicon-download-alt"/><span class="sr-only">#{bundle['file.accessBtn']}</span><span class="caret"></span>
+                        <span style="margin-top: 10px;" class="glyphicon glyphicon-download-alt"/><span class="sr-only">#{bundle['file.accessBtn']}</span><span class="caret"></span>
                     </a>
                     <ul class="dropdown-menu pull-right text-left">
                         <!-- Explore/Download/Request Button Block -->
                         <ui:include src="file-download-button-fragment.xhtml">
                             <ui:param name="fileMetadata" value="#{fileMetadata}"/>
-                            <ui:param name="downloadPopupRequired" value="#{DatasetPage.downloadPopupRequired}"/>
                             <ui:param name="fileMetadataForAction" value="#{DatasetPage.fileMetadataForAction}"/>
-                            <ui:param name="requestAccessPopupRequired" value="#{DatasetPage.requestAccessPopupRequired}"/>
+                            <ui:param name="termsPopupRequired" value="#{DatasetPage.termsPopupRequired}"/>
+                            <ui:param name="guestbookPopupRequiredAtDownload" value="#{DatasetPage.guestbookPopupRequiredAtDownload}"/>        
                             <ui:param name="guestbookResponse" value="#{DatasetPage.guestbookResponse}"/>
+                            <ui:param name="guestbookAndTermsPopupRequired" value="#{DatasetPage.guestbookAndTermsPopupRequired}"/>
                             <ui:param name="guestbookResponseService" value="#{DatasetPage.guestbookResponseService}"/>
                             <ui:param name="fileDownloadService" value="#{DatasetPage.fileDownloadService}"/>
                             <ui:param name="lockedFromDownload" value="#{DatasetPage.lockedFromDownload}"/>
                             <ui:param name="exploreTools" value="#{DatasetPage.getExploreToolsForDataFile(fileMetadata.dataFile.id)}"/>
                             <ui:param name="anonymized" value="#{DatasetPage.anonymizedAccess}"/>
+                            <ui:param name="bean" value="#{DatasetPage}"/>
                         </ui:include>
                         <!-- END: Explore/Download/Request Button Block -->
 
@@ -597,7 +653,7 @@
             <!-- For a file, the payload data ("node") is the corresponding FileMetadata object: -->
             <span class="icon-#{dataFileServiceBean.getFileThumbnailClass(node.dataFile)} text-muted"/>
             <!-- conditional render in file page links, if file has global id, use that; if not, we use database id -->
-            <a href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty node.dataFile.globalIdString ? 'persistentId=' : 'fileId=').concat(!empty node.dataFile.globalIdString ? node.dataFile.globalIdString : node.dataFile.id).concat('&amp;version=').concat(DatasetPage.workingVersion.friendlyVersionNumber))}">
+            <a href="#{widgetWrapper.wrapURL('/file.xhtml?'.concat(!empty node.dataFile.globalId ? 'persistentId=' : 'fileId=').concat(!empty node.dataFile.globalId ? node.dataFile.globalId.asString() : node.dataFile.id).concat('&amp;version=').concat(DatasetPage.workingVersion.friendlyVersionNumber))}">
                 #{node.label}
             </a>
             <span class="text-muted">(#{node.dataFile.friendlySize})</span>
diff --git a/src/main/webapp/globus.xhtml b/src/main/webapp/globus.xhtml
deleted file mode 100644
index f4eebd4babf..00000000000
--- a/src/main/webapp/globus.xhtml
+++ /dev/null
@@ -1,30 +0,0 @@
-<!DOCTYPE html>
-<html xmlns="http://www.w3.org/1999/xhtml"
-      xmlns:h="http://java.sun.com/jsf/html"
-      xmlns:f="http://java.sun.com/jsf/core"
-      xmlns:ui="http://java.sun.com/jsf/facelets"
-      xmlns:p="http://primefaces.org/ui"
-      xmlns:c="http://xmlns.jcp.org/jsp/jstl/core"
-      xmlns:jsf="http://xmlns.jcp.org/jsf">
-<h:head>
-</h:head>
-<h:body>
-    <ui:composition template="/dataverse_template.xhtml">
-        <ui:define name="body">
-            <script>
-                //<![CDATA[
-                var urlParameters = window.location.search
-                console.log(urlParameters);
-                //]]>
-            </script>
-            <f:metadata>
-                <f:viewParam name="code" value="#{GlobusServiceBean.code}" />
-                <f:viewParam name="state" value="#{GlobusServiceBean.state}" />
-                <f:viewAction action="#{GlobusServiceBean.onLoad}" />
-            </f:metadata>
-
-        </ui:define>
-    </ui:composition>
-
-</h:body>
-</html>
diff --git a/src/main/webapp/file-download-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml
similarity index 83%
rename from src/main/webapp/file-download-popup-fragment.xhtml
rename to src/main/webapp/guestbook-terms-popup-fragment.xhtml
index e1020c85e69..5948047d845 100644
--- a/src/main/webapp/file-download-popup-fragment.xhtml
+++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml
@@ -7,20 +7,24 @@
                 xmlns:o="http://omnifaces.org/ui"
                 xmlns:jsf="http://xmlns.jcp.org/jsf"
                 xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
-
+                
+    <ui:fragment rendered="#{DatasetPage.editMode != 'CREATE'}">
         <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
         <p:focus context="guestbookUIFragment"/>
-        <p class="help-block">
-            #{bundle['file.downloadDialog.tip']}
-        </p>
-        <p:fragment id="guestbookMessages">
-            <div class="container messagePanel">
-                <iqbs:messages collapsible="true" />
-            </div>
-        </p:fragment>
-        
+        <ui:fragment rendered="#{termsGuestbookPopupAction != bundle.download}">
+            <p class="help-block">
+                #{someActivelyEmbargoedFiles ? bundle['file.requestAccessTermsDialog.embargoed.tip'] : bundle['file.requestAccessTermsDialog.tip']}
+            </p>
+            <p class="help-block" jsf:rendered="#{someActivelyEmbargoedFiles}">
+                #{bundle['file.requestAccessTermsDialog.embargoed']}
+            </p>
+        </ui:fragment>
+        <ui:fragment rendered="#{termsGuestbookPopupAction == bundle.download}">
+            <p class="help-block">
+                #{bundle['file.downloadDialog.tip']}
+            </p>
+        </ui:fragment>
         <div class="form-horizontal terms">
-        
         <div class="form-group">
             <label jsf:for="fdLicense" class="col-sm-3 control-label">
                 #{bundle['file.dataFilesTab.terms.list.license']} </label>
@@ -138,7 +142,10 @@
                 value="#{MarkupChecker:sanitizeBasicHTML(workingVersion.termsOfUseAndAccess.termsOfAccess)}"
                 escape="false" />
         </div>
-        <p:fragment rendered="#{workingVersion.dataset.guestbook != null and workingVersion.dataset.guestbook.enabled and downloadPopupRequired}" id="guestbookUIFragment">
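+            <!-- XOR on the two booleans: when the guestbook is collected at download time, the guestbook
+                 inputs render only for the download action; otherwise they render only at request access -->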
+            <p:fragment rendered="#{guestbookAndTermsPopupRequired and ((termsGuestbookPopupAction != bundle.download) != guestbookPopupRequiredAtDownload)}" id="guestbookUIFragment">
+                <div class="container messagePanel">
+                    <iqbs:messages collapsible="true" />
+                </div>
                 <!--
                 Only validate the active context. For example, the user might be filling out
                 the guestbook on the Preview tab rather than after clicking the "download file"
@@ -244,8 +251,8 @@
                 </div>
             </p:fragment>
         </div>
-        <div class="button-block">
-            <!--
+        <div class="button-block" jsf:rendered="#{termsGuestbookPopupAction == bundle.download}">
+        <!--
             The "process" directive below is very important. Without it, the
             setters on the GuestbookResponse object can be called twice leading
             to form values (name, email, etc) to be overwritten by the object in
@@ -263,15 +270,25 @@
             -->
             <!--REGULAR DOWNLOAD BUTTON, NO EXTERNAL TOOL, NOT THE PREVIEW TAB-->
             <!--Note: the guestbookResponse.fileFormat is being set in xhtml via the initial download buttons in file-download-button-fragment.xhtml -->
+            
             <p:commandButton styleClass="btn btn-default" value="#{bundle.acceptTerms}"
                              rendered="#{guestbookResponse.fileFormat != 'externalTool' and
                                          guestbookResponse.fileFormat != 'package' and
-                                         popupContext != 'previewTab'}"
-                             actionListener="#{fileDownloadHelper.writeGuestbookAndStartDownload(guestbookResponse)}"
+                                         popupContext != 'previewTab' and not isGlobusTransfer}"
+                             actionListener="#{fileDownloadHelper.writeGuestbookAndStartDownload(guestbookResponse, false)}"
                              update="guestbookUIFragment">
                 <f:param name="DO_GB_VALIDATION_#{popupContext}" value="true"/>
             </p:commandButton>
-            <!--PREVIEW TAB BUTTON-->
+            <!--  REGULAR GLOBUS TRANSFER -->
+            <p:commandButton styleClass="btn btn-default" value="#{bundle.acceptTerms}"
+                             rendered="#{guestbookResponse.fileFormat != 'externalTool' and
+                                         guestbookResponse.fileFormat != 'package' and
+                                         popupContext != 'previewTab' and isGlobusTransfer}"
+                             actionListener="#{fileDownloadHelper.writeGuestbookAndStartDownload(guestbookResponse, true)}"
+                             update="guestbookUIFragment">
+                <f:param name="DO_GB_VALIDATION_#{popupContext}" value="true"/>
+            </p:commandButton>
+            <!--PREVIEW TAB BUTTON-->
             <p:commandButton styleClass="btn btn-default" value="#{bundle.acceptTerms}"
                              rendered="#{popupContext == 'previewTab'}"
                              actionListener="#{FilePage.showPreview(guestbookResponse)}"
@@ -292,9 +309,27 @@
                              update="guestbookUIFragment">
                 <f:param name="DO_GB_VALIDATION_#{popupContext}" value="true"/>
             </p:commandButton>
-            <!--CANCEL BUTTON We don't render the Cancel button on the Preview tab because there is no popup to close.-->
-            <button class="btn btn-link" onclick="PF('downloadPopup').hide();PF('blockDatasetForm').hide();" type="button" jsf:rendered="#{popupContext != 'previewTab'}">
+            <button class="btn btn-link" onclick="PF('guestbookAndTermsPopup').hide();PF('blockDatasetForm').hide();" type="button">
                 #{bundle.cancel}
             </button>
         </div>
-</ui:composition>
\ No newline at end of file
+        <div class="button-block" jsf:rendered="#{termsGuestbookPopupAction == bundle['file.requestAccess']}">
+        <!--TODO - add "Accept and" if ToA exist?-->
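+        <!-- Two request-access buttons: when the guestbook is not deferred to download time, the guestbook
+             response is written together with the access request; otherwise only the access request is filed -->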
+            <p:commandLink type="button" styleClass="btn btn-default" rendered="#{!guestbookPopupRequiredAtDownload}" value="#{bundle['file.requestAccess']}"
+                    actionListener="#{fileDownloadHelper.writeGuestbookResponseAndRequestAccess(guestbookResponse)}"
+                    update="guestbookUIFragment"
+                    oncomplete="if (!args || (args &amp;&amp; !args.validationFailed)){PF('guestbookAndTermsPopup').hide();window.location.replace(window.location.href);}">
+                    <f:param name="DO_GB_VALIDATION_#{popupContext}" value="true"/>
+            </p:commandLink>
+
+            <p:commandLink type="button" styleClass="btn btn-default" rendered="#{guestbookPopupRequiredAtDownload}" value="#{bundle['file.requestAccess']}"
+                    actionListener="#{fileDownloadHelper.requestAccessIndirect()}"
+                    update="guestbookUIFragment"
+                    oncomplete="if (!args || (args &amp;&amp; !args.validationFailed)){PF('guestbookAndTermsPopup').hide();window.location.replace(window.location.href);}">
+                    <f:param name="DO_GB_VALIDATION_#{popupContext}" value="true"/>
+            </p:commandLink>
+
+            <button type="button" class="btn btn-default" onclick="PF('guestbookAndTermsPopup').hide();PF('blockDatasetForm').hide();" value="#{bundle.cancel}">#{bundle.cancel}</button>
+        </div>
+    </ui:fragment>
+</ui:composition>
diff --git a/src/main/webapp/manage-templates.xhtml b/src/main/webapp/manage-templates.xhtml
index c9841ace8e8..879cf9e55c2 100644
--- a/src/main/webapp/manage-templates.xhtml
+++ b/src/main/webapp/manage-templates.xhtml
@@ -139,7 +139,7 @@
                         </p:dataTable>
                     </div>
                     <p:dialog id="deleteTemplateConfirm" header="#{bundle['dataset.manageTemplates.tab.action.btn.delete.dialog.header']}" 
-                              widgetVar="deleteConfirmation" modal="true" focus="contDeleteTemplateBtn">
+                              styleClass="largePopUp" widgetVar="deleteConfirmation" modal="true" focus="contDeleteTemplateBtn">
                         <p class="help-block">
                             <span class="text-warning"><span class="glyphicon glyphicon-warning-sign"/> &#160;<h:outputText value="#{bundle['dataset.manageTemplates.tab.action.btn.delete.dialog.tip']}" /></span>
                         </p>
diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml
index 324fd2e0b84..200d2917b9a 100755
--- a/src/main/webapp/metadataFragment.xhtml
+++ b/src/main/webapp/metadataFragment.xhtml
@@ -13,7 +13,7 @@
            <script src="#{vocabScriptUrl}?version=#{systemConfig.getVersion()}"/>
         </ui:repeat>
         </p:fragment>
-        <c:set var="cvocConf" value="#{settingsWrapper.getCVocConf()}"/>
+        <c:set var="cvocConf" value="#{settingsWrapper.getCVocConf(false)}"/>
     <!-- View Mode -->
     <div class="panel-group" jsf:rendered="${empty editMode  or managePage}">
         <o:importFunctions type="edu.harvard.iq.dataverse.util.MarkupChecker" />
@@ -57,7 +57,7 @@
                                         </th>
                                         <td>#{publicationDate}</td>
                                     </tr>
-                                    <tr id="metadata_citationDate" jsf:rendered="#{!empty publicationDate and (publicationDate ne citationDate)}">
+                                    <tr id="metadata_citationDate" jsf:rendered="#{!empty citationDate and (publicationDate ne citationDate)}">
                                         <th scope="row">
                                             #{bundle['dataset.metadata.citationDate']}
                                             <span class="glyphicon glyphicon-question-sign tooltip-icon"
@@ -73,6 +73,7 @@
                                             <span class="glyphicon glyphicon-question-sign tooltip-icon"
                                                   data-toggle="tooltip" data-placement="auto right" data-original-title="#{dsf.datasetFieldType.localeDescription}"></span>
                                         </th>
+                                        <c:set var="cvocOnDsf" value="#{cvocConf.containsKey(dsf.datasetFieldType.id)}"/>
                                         <td jsf:rendered="#{!anonymized or !settingsWrapper.shouldBeAnonymized(dsf)}">
                                             <!-- Primitive datasetFields -->
                                             <ui:fragment rendered="#{dsf.datasetFieldType.primitive}">
@@ -83,7 +84,7 @@
                                                   </div>
                                                 </ui:fragment>
                                                 <h:outputText value="#{dsf.value}"
-                                                    rendered="#{!dsf.datasetFieldType.allowMultiples and cvocConf.containsKey(dsf.datasetFieldType.id)}"
+                                                    rendered="#{!dsf.datasetFieldType.allowMultiples and cvocOnDsf}"
                                                     escape="#{dsf.datasetFieldType.isEscapeOutputText()}">
                                                     <f:passThroughAttribute name="lang" value="#{DatasetPage.getFieldLanguage(cvocConf.get(dsf.datasetFieldType.id).getString('languages'))}" />
                                                     <f:passThroughAttribute name="data-cvoc-service-url" value="#{cvocConf.get(dsf.datasetFieldType.id).getString('cvoc-url','')}" />
@@ -91,9 +92,9 @@
                                                     <f:passThroughAttribute name="data-cvoc-managedfields" value="#{cvocConf.get(dsf.datasetFieldType.id).get('managedfields').toString()}" />
                                                 </h:outputText>
                                                 <h:outputText value="#{dsf.getDisplayValue(mdLangCode)}"
-                                                    rendered="#{!dsf.datasetFieldType.allowMultiples and !cvocConf.containsKey(dsf.datasetFieldType.id)}"
+                                                    rendered="#{!dsf.datasetFieldType.allowMultiples and !cvocOnDsf}"
                                                     escape="#{dsf.datasetFieldType.isEscapeOutputText()}"/>
-                                                <ui:repeat value="#{dsf.getValues_nondisplay()}" var="value" varStatus="loop" rendered="#{dsf.datasetFieldType.allowMultiples and cvocConf.containsKey(dsf.datasetFieldType.id)}">
+                                                <ui:repeat value="#{dsf.getValues_nondisplay()}" var="value" varStatus="loop" rendered="#{dsf.datasetFieldType.allowMultiples and cvocOnDsf}">
                                                     <h:outputText value="#{loop.first?'':'; '}"/>
                                                     <h:outputText value="#{ value }"
                                                                   escape="#{dsf.datasetFieldType.isEscapeOutputText()}">
@@ -103,7 +104,7 @@
                                                         <f:passThroughAttribute name="data-cvoc-managedfields" value="#{cvocConf.get(dsf.datasetFieldType.id).get('managedfields').toString()}" />
                                                     </h:outputText>
                                                 </ui:repeat>
-                                                <ui:repeat value="#{dsf.getValues(mdLangCode)}" var="value" varStatus="loop" rendered="#{dsf.datasetFieldType.allowMultiples and !cvocConf.containsKey(dsf.datasetFieldType.id)}">
+                                                <ui:repeat value="#{dsf.getValues(mdLangCode)}" var="value" varStatus="loop" rendered="#{dsf.datasetFieldType.allowMultiples and !cvocOnDsf}">
                                                     <h:outputText value="#{loop.first?'':'; '}#{ value }"
                                                                   escape="#{dsf.datasetFieldType.isEscapeOutputText()}"/>
                                                 </ui:repeat>
@@ -123,9 +124,11 @@
                                                 </ui:fragment>
           
                                                 <ui:repeat value="#{dsf.datasetFieldCompoundValues}" var="compoundValue" varStatus="compoundValuesstatus">
+                                                    
                                                     <ui:repeat value="#{compoundValue.displayValueMap.entrySet().toArray()}" var="cvPart" varStatus="partStatus">
-                                                        <h:outputText value="#{dsf.datasetFieldType.displayFormat} " rendered="${!partStatus.first and !cvocConf.containsKey(dsf.datasetFieldType.id)}"/>
-                                                        <ui:fragment rendered="#{!cvocConf.containsKey(dsf.datasetFieldType.id)}">
+                                                        <c:set var="cvocOnCvPart" value="#{cvocConf.containsKey(cvPart.key.datasetFieldType.id)}"/>
+                                                        <h:outputText value="#{dsf.datasetFieldType.displayFormat} " rendered="${!partStatus.first and !(cvocOnDsf or cvocOnCvPart)}"/>
+                                                        <ui:fragment rendered="#{!(cvocOnDsf or cvocOnCvPart)}">
                                                             <ui:fragment rendered="#{compoundValue.isLink(cvPart.key)}">
                                                                 <h:outputLink value="#{compoundValue.getLink()}" target="_blank">
                                                                     <h:outputText value="#{cvPart.value}"
@@ -137,9 +140,20 @@
                                                                               escape="#{cvPart.key.datasetFieldType.isEscapeOutputText()}"/>
                                                             </ui:fragment>
                                                         </ui:fragment>
+                                                        <!--  Cvoc for single child field -->
                                                         <h:outputText value="#{cvPart.key.value}"
                                                                       escape="#{cvPart.key.datasetFieldType.isEscapeOutputText()}" 
-                                                                      rendered="${cvocConf.containsKey(dsf.datasetFieldType.id) and cvPart.key.datasetFieldType.name.equals(cvocConf.get(dsf.datasetFieldType.id).getString('term-uri-field'))}">
+                                                                      rendered="${cvocOnCvPart and cvPart.key.datasetFieldType.name.equals(cvocConf.get(cvPart.key.datasetFieldType.id).getString('term-uri-field'))}">
+                                                                      <f:passThroughAttribute name="lang" value="#{DatasetPage.getFieldLanguage(cvocConf.get(cvPart.key.datasetFieldType.id).getString('languages'))}" />
+                                                                      <f:passThroughAttribute name="data-cvoc-service-url" value="#{cvocConf.get(cvPart.key.datasetFieldType.id).getString('cvoc-url','')}" />
+                                                                      <f:passThroughAttribute name="data-cvoc-protocol" value="#{cvocConf.get(cvPart.key.datasetFieldType.id).getString('protocol','')}" />
+                                                                      <!-- unlikely to be used in this case -->
+                                                                      <f:passThroughAttribute name="data-cvoc-managedfields" value="#{cvocConf.get(cvPart.key.datasetFieldType.id).get('managedfields').toString()}" />
+                                                        </h:outputText>
+                                                        <!--  Cvoc on parent field -->
+                                                        <h:outputText value="#{cvPart.key.value}"
+                                                                      escape="#{cvPart.key.datasetFieldType.isEscapeOutputText()}" 
+                                                                      rendered="${cvocOnDsf and cvPart.key.datasetFieldType.name.equals(cvocConf.get(dsf.datasetFieldType.id).getString('term-uri-field'))}">
                                                                       <f:passThroughAttribute name="lang" value="#{DatasetPage.getFieldLanguage(cvocConf.get(dsf.datasetFieldType.id).getString('languages'))}" />
                                                                       <f:passThroughAttribute name="data-cvoc-service-url" value="#{cvocConf.get(dsf.datasetFieldType.id).getString('cvoc-url','')}" />
                                                                       <f:passThroughAttribute name="data-cvoc-protocol" value="#{cvocConf.get(dsf.datasetFieldType.id).getString('protocol','')}" />
@@ -346,7 +360,7 @@
                                                                 <ui:include src="datasetFieldForEditFragment.xhtml">
                                                                     <ui:param name="dsfv" value="#{subdsf.singleValue}"/>
                                                                     <ui:param name="dsfvIndex" value="0"/>
-                                                                    <ui:param name="cvoc" value="#{cvocConf.containsKey(dsf.datasetFieldType.id)? cvocConf.get(dsf.datasetFieldType.id): null}"/>
+                                                                    <ui:param name="cvoc" value="#{cvocConf.containsKey(dsf.datasetFieldType.id)? cvocConf.get(dsf.datasetFieldType.id): cvocConf.containsKey(dsfv.datasetField.datasetFieldType.id)? cvocConf.get(dsfv.datasetField.datasetFieldType.id): null}"/>
                                                                 </ui:include>
                                                             </ui:fragment>
                                                             <!-- MOVED SELECT ONE TO dataFieldForEdit -->
diff --git a/src/main/webapp/mydata_fragment.xhtml b/src/main/webapp/mydata_fragment.xhtml
index 17cd2d3dbfa..b3828578dca 100644
--- a/src/main/webapp/mydata_fragment.xhtml
+++ b/src/main/webapp/mydata_fragment.xhtml
@@ -79,7 +79,23 @@
                 </div>    
             </ui:repeat>
             </div>
-           <!-- end: role states -->
+            <!-- end: role states -->
+            <!-- start: metadata validity -->
+            <div jsf:rendered="#{MyDataPage.showValidityFilter()}">
+                <input type="hidden" name="filter_validities" id="filter_validities" value="true" />
+                <b>#{bundle['mydataFragment.validity']}</b>
+                <div id="div_dataset_valid" style="border-bottom:1px solid #ddd; padding-bottom:8px; margin-bottom:0.5em;">
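+                    <!-- validityInfoList entries are used positionally: [0] checkbox value, [1] element id suffix, [2] visible label -->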
+                    <ui:repeat value="#{MyDataPage.validityInfoForCheckboxes}" var="validityInfoList">
+                        <div class="checkbox" style="margin-top:3px;margin-bottom:0;">
+                            <input style="margin-left:0;" name="dataset_valid" id="id_cbox_#{validityInfoList[1]}" type="checkbox" value="#{validityInfoList[0]}" checked="checked" />
+                            <label>
+                                <a class="mydata_validity_label">#{validityInfoList[2]} <span id="id_#{validityInfoList[1]}_count"></span></a>
+                            </label>                    
+                        </div>    
+                    </ui:repeat>
+                </div>
+            </div>
+           <!-- end: metadata validity -->
              <!--/form>-->
             <div id="div-search-params" class="well" style="display:none;"></div>
         </div><!-- end: filter column -->
@@ -112,17 +128,18 @@
     <script>
         //<![CDATA[
         // bundle text variables
-        var mydataresult = '#{bundle['mydata.result']}';
-        var mydataresults = '#{bundle['mydata.results']}';
-        var mydataviewnext = '#{bundle['mydata.viewnext']}';
-        var mydatamore = '#{bundle['mydata.more']}';
-        var draft = '#{bundle['Draft']}';
-        var inreview = '#{bundle['In Review']}';
-        var unpublished = '#{bundle['Unpublished']}';
-        var published = '#{bundle['Published']}';
-        var deaccessioned = '#{bundle['Deaccessioned']}';
-        var mydatato = '#{bundle['to']}';
-        var mydataof = '#{bundle['of']}';
+        var mydataresult = "#{bundle['mydata.result']}";
+        var mydataresults = "#{bundle['mydata.results']}";
+        var mydataviewnext = "#{bundle['mydata.viewnext']}";
+        var mydatamore = "#{bundle['mydata.more']}";
+        var draft = "#{bundle['Draft']}";
+        var inreview = "#{bundle['In Review']}";
+        var unpublished = "#{bundle['Unpublished']}";
+        var published = "#{bundle['Published']}";
+        var deaccessioned = "#{bundle['Deaccessioned']}";
+        var mydatato = "#{bundle['to']}";
+        var mydataof = "#{bundle['of']}";
+        var incomplete = "#{bundle['incomplete']}";
             
         $(document).ready(function() {
               
diff --git a/src/main/webapp/mydata_templates/cards_minimum.html b/src/main/webapp/mydata_templates/cards_minimum.html
index 90b3668b6dc..d1a6ebec600 100644
--- a/src/main/webapp/mydata_templates/cards_minimum.html
+++ b/src/main/webapp/mydata_templates/cards_minimum.html
@@ -35,6 +35,7 @@
         {% if card_info.is_unpublished_state %}<span class="label label-warning unpublished"></span> {% endif %}
         {% if card_info.is_published %} <span class="label label-warning published"></span> {% endif %}
         {% if card_info.is_deaccesioned %} <span class="label label-danger deaccessioned"></span> {% endif %}
+        {% if not card_info.is_valid %} <span class="label label-danger incomplete"></span> {% endif %}
         
         <!-- roles -->
         {% for role_name in card_info.user_roles %}
@@ -97,7 +98,7 @@
     {% else %}
         {% if card_info.description %}
         
-            {{ card_info.description|truncate(250)|safe }}
+            {{ card_info.description|truncate(250) }}
         {% endif %}
     {% endif %}
             
diff --git a/src/main/webapp/permissions-manage-files.xhtml b/src/main/webapp/permissions-manage-files.xhtml
index 122ee2b616f..4e4e56f2051 100644
--- a/src/main/webapp/permissions-manage-files.xhtml
+++ b/src/main/webapp/permissions-manage-files.xhtml
@@ -30,7 +30,7 @@
                         <div class="panel panel-default">
                             <div data-toggle="collapse" data-target="#panelCollapseUsersGroups" class="panel-heading text-info">
                                 #{bundle['dataverse.permissionsFiles.usersOrGroups']} <span class="glyphicon glyphicon-chevron-up"/>
-                                
+
                                 <span class="text-muted small pull-right">#{bundle['dataverse.permissionsFiles.usersOrGroups.description']}</span>
                             </div>
                             <div id="panelCollapseUsersGroups" class="collapse in">
@@ -58,9 +58,9 @@
                                             <p:fragment id="userGroupsRequests" rendered="#{!empty manageFilePermissionsPage.fileAccessRequestMap.entrySet().toArray()}">
                                                 <p>
                                                     <span class="highlightBold">#{manageFilePermissionsPage.fileAccessRequestMap.size()} #{bundle['dataverse.permissions.requests']} </span>
-                                                </p> 
+                                                </p>
                                                 <p:dataTable id="fileAccessRequests" styleClass="margin-bottom" var="access" value="#{manageFilePermissionsPage.fileAccessRequestMap.entrySet().toArray()}">
-                                                    <p:column width="25%" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup']}" >
+                                                    <p:column width="20%" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup']}" >
                                                         <h:outputText value="#{access.key.displayInfo.title}"/>
                                                         <h:outputText value=" (#{access.key.displayInfo.affiliation})" rendered="#{!empty access.key.displayInfo.affiliation}"/>
                                                     </p:column>
@@ -68,7 +68,7 @@
                                                         <h:outputText value="#{access.key.identifier}"/>
                                                     </p:column>
                                                     <p:column width="15%" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.authentication']}">
-                                                        <h:outputText value="#{manageFilePermissionsPage.getAuthProviderFriendlyName(access.key.authProviderId)}"/>
+                                                        <h:outputText value="#{manageFilePermissionsPage.getAuthProviderFriendlyName(access.key.authProviderId)}" />
                                                     </p:column>
                                                     <p:column width="15%" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.email']}">
                                                         <h:outputText value="#{access.key.email}" rendered="#{!empty access.key.email}"/>
@@ -81,20 +81,29 @@
                                                             <h:outputText value="#{access.value.size()} #{access.value.size() eq 1 ?  bundle['dataverse.permissionsFiles.usersOrGroups.file'] : bundle['dataverse.permissionsFiles.usersOrGroups.files']}"/>
                                                         </p:commandLink>
                                                     </p:column>
-                                                    <p:column width="25%" class="text-center" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.access']}">
+                                                    <p:column width="10%" class="text-center" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.accessRequestDate']}">
+                                                        <h:outputText value="#{manageFilePermissionsPage.formatAccessRequestDate(access.value)}"
+                                                                      title="#{manageFilePermissionsPage.formatAccessRequestTimestamp(access.value)}"
+                                                            rendered="#{manageFilePermissionsPage.formatAccessRequestDate(access.value) != null}" />
+                                                        <h:outputText value="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.accessRequestDateNotAvailable']}"
+                                                            rendered="#{manageFilePermissionsPage.formatAccessRequestDate(access.value) == null}" />
+                                                    </p:column>
+                                                    <p:column width="20%" class="text-center" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.access']}">
                                                         <div class="btn-group" role="group" aria-label="access">
                                                             <p:commandLink styleClass="btn btn-default"
                                                                            actionListener="#{manageFilePermissionsPage.grantAccessToAllRequests(access.key)}"
-                                                                           update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)} :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)} :#{p:resolveClientId('rolesPermissionsForm:fileAccessRequests', view)}                                                                       
-                                                                           :#{p:resolveClientId('rolesPermissionsForm:userGroupsRequests', view)}
-                                                                           :#{p:resolveClientId('rolesPermissionsForm:userGroupMessages', view)} @([id$=Messages])">
+                                                                           update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)}
+                                                                           :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)}
+                                                                           #{p:resolveClientId('rolesPermissionsForm:usersGroups', view)}
+                                                                           @([id$=Messages])">
                                                                 <span class="glyphicon glyphicon-ok"/> #{bundle['dataverse.permissionsFiles.assignDialog.grantBtn']}
                                                             </p:commandLink>
                                                             <p:commandLink styleClass="btn btn-default"
                                                                            actionListener="#{manageFilePermissionsPage.rejectAccessToAllRequests(access.key)}"
-                                                                           update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)} :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)} :#{p:resolveClientId('rolesPermissionsForm:fileAccessRequests', view)}                                                                       
-                                                                           :#{p:resolveClientId('rolesPermissionsForm:userGroupsRequests', view)}
-                                                                           :#{p:resolveClientId('rolesPermissionsForm:userGroupMessages', view)} @([id$=Messages])">
+                                                                           update=":#{p:resolveClientId('rolesPermissionsForm:userGroups', view)}
+                                                                           :#{p:resolveClientId('rolesPermissionsForm:restrictedFiles', view)}
+                                                                           #{p:resolveClientId('rolesPermissionsForm:usersGroups', view)}
+                                                                           @([id$=Messages])">
                                                                 <span class="glyphicon glyphicon-ban-circle"/> #{bundle['dataverse.permissionsFiles.assignDialog.rejectBtn']}
                                                             </p:commandLink>
                                                         </div>
@@ -142,7 +151,7 @@
                         <div class="panel panel-default">
                             <div data-toggle="collapse" data-target="#panelCollapseFiles" class="panel-heading text-info">
                                 #{bundle['dataverse.permissionsFiles.files']} <span class="glyphicon glyphicon-chevron-down"/>
-                                
+
                                 <span class="text-muted small pull-right">#{bundle['dataverse.permissionsFiles.files.description']}</span>
                             </div>
                             <div id="panelCollapseFiles" class="collapse">
@@ -162,7 +171,7 @@
                                             <h:outputFormat styleClass="highlightBold" value="#{manageFilePermissionsPage.fileMap.size()} #{bundle['dataverse.permissionsFiles.files.label']}">
                                                 <f:param value="#{manageFilePermissionsPage.fileMap.size()}"/>
                                             </h:outputFormat>
-                                        </p>  
+                                        </p>
                                         <p:dataTable id="restrictedFiles" var="fileEntry" value="#{manageFilePermissionsPage.fileMap.entrySet().toArray()}"
                                                      emptyMessage="#{bundle['dataverse.permissionsFiles.files.invalidMsg']}">
                                             <p:column width="40%" headerText="#{bundle['dataverse.permissionsFiles.files.tabHeader.fileName']}">
@@ -204,7 +213,7 @@
                                                                oncomplete="PF('assignWidget').show();handleResizeDialog('assignDialog');"><span class="glyphicon glyphicon-plus"/> #{bundle['dataverse.permissionsFiles.files.assignBtn']}</p:commandLink>
                                             </p:column>
                                         </p:dataTable>
-                                    </p:fragment>    
+                                    </p:fragment>
                                 </div>
                             </div>
                         </div>
@@ -221,21 +230,21 @@
                                     </h:outputFormat>
                                 </p>
                             </p:outputPanel>
-                            
+
                             <p:dataTable id="assignedRoles" var="roleAssignment" value="#{manageFilePermissionsPage.roleAssignments}"
-                                         selection="#{manageFilePermissionsPage.selectedRoleAssignmentRows}" rowKey="#{roleAssignment.id}" 
+                                         selection="#{manageFilePermissionsPage.selectedRoleAssignmentRows}" rowKey="#{roleAssignment.id}"
                                          sortBy="#{!empty manageFilePermissionsPage.selectedFile ? roleAssignment.assigneeDisplayInfo.title : roleAssignment.definitionPoint.displayName}"
                                          scrollable="true" scrollHeight="300" disabledSelection="#{!roleAssignment.definitionPoint.instanceofDataFile}">
-                                
+
                                 <p:ajax event="rowSelectCheckbox"  update=":rolesPermissionsForm:selectionRemoveCounter" />
                                 <p:ajax event="rowUnselectCheckbox"  update=":rolesPermissionsForm:selectionRemoveCounter" />
                                 <p:ajax event="toggleSelect"  update=":rolesPermissionsForm:selectionRemoveCounter" />
-                                
+
                                 <p:column selectionMode="multiple" style="width:2%;text-align:center"/>
                                 <p:column width="98%" headerText="#{bundle['dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup']}" rendered="#{!empty manageFilePermissionsPage.selectedFile}">
                                     <h:outputText value="#{roleAssignment.assigneeDisplayInfo.title}"/> <h:outputText value=" (#{roleAssignment.assigneeDisplayInfo.affiliation})" rendered="#{!empty roleAssignment.assigneeDisplayInfo.affiliation}"/>
                                 </p:column>
-                                <p:column width="98%" headerText="#{bundle['dataverse.permissionsFiles.files.tabHeader.fileName']}" rendered="#{!empty manageFilePermissionsPage.selectedRoleAssignee}">                                   
+                                <p:column width="98%" headerText="#{bundle['dataverse.permissionsFiles.files.tabHeader.fileName']}" rendered="#{!empty manageFilePermissionsPage.selectedRoleAssignee}">
                                     <h:outputText rendered="#{!empty roleAssignment.definitionPoint.directoryLabel}" value="#{roleAssignment.definitionPoint.directoryLabel}/" styleClass="text-muted"/>
                                     <h:outputText value="#{roleAssignment.definitionPoint.displayName}"/>
                                     <h:outputText value=" (#{bundle['dataverse.permissionsFiles.files.deleted']}) " rendered="#{roleAssignment.deleted}"/>
@@ -270,17 +279,17 @@
                                     </label>
                                     <div class="col-sm-5">
                                         <p:autoComplete id="userGroupNameAssign" placeholder="#{bundle['dataverse.permissionsFiles.assignDialog.userOrGroup.enterName']}"
-                                                        multiple="true" scrollHeight="180" forceSelection="true" 
-                                                        minQueryLength="2" queryDelay="1000"                                      
-                                                        groupBy="#{roleAssignee.identifier.startsWith('@')?'Users':'Groups'}"                                              
+                                                        multiple="true" scrollHeight="180" forceSelection="true"
+                                                        minQueryLength="2" queryDelay="1000"
+                                                        groupBy="#{roleAssignee.identifier.startsWith('@')?'Users':'Groups'}"
                                                         emptyMessage="#{bundle['dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg']}"
-                                                        value="#{manageFilePermissionsPage.selectedRoleAssignees}"  
+                                                        value="#{manageFilePermissionsPage.selectedRoleAssignees}"
                                                         completeMethod="#{manageFilePermissionsPage.completeRoleAssignee}"
                                                         required="#{param['DO_VALIDATION']}" requiredMessage="#{bundle['dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg']}"
                                                         styleClass="DropdownPopup" panelStyleClass="DropdownPopupPanel"
                                                         var="roleAssignee" itemLabel="#{roleAssignee.displayInfo.title}" itemValue="#{roleAssignee}" converter="roleAssigneeConverter">
                                             <p:column>
-                                                <h:outputText value="#{roleAssignee.displayInfo.title}"/> 
+                                                <h:outputText value="#{roleAssignee.displayInfo.title}"/>
                                                 <h:outputText value=" (#{roleAssignee.displayInfo.affiliation})" rendered="#{!empty roleAssignee.displayInfo.affiliation}"/>
                                             </p:column>
                                             <p:column>
@@ -298,11 +307,11 @@
                                     <p>
                                         <h:outputFormat styleClass="highlightBold" value="#{bundle['dataverse.permissionsFiles.files.selected']}">
                                             <f:param value="#{manageFilePermissionsPage.selectedFiles.size()}"/>
-                                            <f:param value="#{!empty manageFilePermissionsPage.fileRequester ? 
+                                            <f:param value="#{!empty manageFilePermissionsPage.fileRequester ?
                                                         manageFilePermissionsPage.fileAccessRequestMap.get(manageFilePermissionsPage.fileRequester).size()
                                                         :
                                                         manageFilePermissionsPage.fileMap.size()}"/>
-                                            <f:param value="#{!empty manageFilePermissionsPage.fileRequester ? 
+                                            <f:param value="#{!empty manageFilePermissionsPage.fileRequester ?
                                                         bundle['dataverse.permissionsFiles.files.requested']
                                                         :
                                                         bundle['dataverse.permissionsFiles.files']}"/>
@@ -313,7 +322,7 @@
                                 <o:importFunctions type="java.util.Arrays" />
                                 <p:dataTable id="restrictedFilesForAccess" var="file"
                                              value="#{empty manageFilePermissionsPage.fileRequester ? Arrays:asList(manageFilePermissionsPage.fileMap.keySet().toArray()) :
-                                                      manageFilePermissionsPage.fileAccessRequestMap.get(manageFilePermissionsPage.fileRequester)}"
+                                                      manageFilePermissionsPage.dataFilesForRequestor}"
                                              selection="#{manageFilePermissionsPage.selectedFiles}" rowKey="#{file.id}" sortBy="#{file.displayName}"
                                              scrollable="true" scrollHeight="300">
 
diff --git a/src/main/webapp/provenance-popups-fragment.xhtml b/src/main/webapp/provenance-popups-fragment.xhtml
index da644d60f6a..92f109b11d0 100644
--- a/src/main/webapp/provenance-popups-fragment.xhtml
+++ b/src/main/webapp/provenance-popups-fragment.xhtml
@@ -7,7 +7,10 @@
                 xmlns:o="http://omnifaces.org/ui"
                 xmlns:jsf="http://xmlns.jcp.org/jsf"
                 xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
-    <script src="#{resource['js/fileupload.js']}"></script>
+
+
+    <h:outputScript name='js/fileupload.js?version=#{systemConfig.getVersion()}' />
+
     <p:dialog id="editProvenancePopup" styleClass="smallPopUp" header="#{bundle['file.editProvenanceDialog']}" widgetVar="editProvenancePopup" modal="true" >
         <p:fragment id="assignMessages">
             <div class="popupMessagePanel">
diff --git a/src/main/webapp/resources/css/fontcustom.css-e b/src/main/webapp/resources/css/fontcustom.css-e
deleted file mode 100644
index 010c155d6e5..00000000000
--- a/src/main/webapp/resources/css/fontcustom.css-e
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
-  Icon Font: fontcustom
-*/
-
-@font-face {
-  font-family: "fontcustom";
-  src: url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.eot");
-  src: url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.eot?#iefix") format("embedded-opentype"),
-       url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.woff2") format("woff2"),
-       url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.woff") format("woff"),
-       url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.ttf") format("truetype"),
-       url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.svg#fontcustom") format("svg");
-  font-weight: normal;
-  font-style: normal;
-}
-
-@media screen and (-webkit-min-device-pixel-ratio:0) {
-  @font-face {
-    font-family: "fontcustom";
-    src: url("../fontcustom/fontcustom_2d8a223b23af3c2ffc668cd1b506f1fa.svg#fontcustom") format("svg");
-  }
-}
-
-[data-icon]:before { content: attr(data-icon); }
-
-[data-icon]:before,
-.icon-astro:before,
-.icon-audio:before,
-.icon-code:before,
-.icon-dataset:before,
-.icon-dataverse:before,
-.icon-document:before,
-.icon-file:before,
-.icon-geodata:before,
-.icon-image:before,
-.icon-network:before,
-.icon-other:before,
-.icon-package:before,
-.icon-tabular:before,
-.icon-unlock:before,
-.icon-video:before {
-  display: inline-block;
-  font-family: "fontcustom";
-  font-style: normal;
-  font-weight: normal;
-  font-variant: normal;
-  line-height: 1;
-  text-decoration: inherit;
-  text-rendering: optimizeLegibility;
-  text-transform: none;
-  -moz-osx-font-smoothing: grayscale;
-  -webkit-font-smoothing: antialiased;
-  font-smoothing: antialiased;
-}
-
-.icon-astro:before { content: "\f100"; }
-.icon-audio:before { content: "\f101"; }
-.icon-code:before { content: "\f102"; }
-.icon-dataset:before { content: "\f103"; }
-.icon-dataverse:before { content: "\f104"; }
-.icon-document:before { content: "\f105"; }
-.icon-file:before { content: "\f106"; }
-.icon-geodata:before { content: "\f107"; }
-.icon-image:before { content: "\f108"; }
-.icon-network:before { content: "\f109"; }
-.icon-other:before { content: "\f10a"; }
-.icon-package:before { content: "\f10b"; }
-.icon-tabular:before { content: "\f10c"; }
-.icon-unlock:before { content: "\f10d"; }
-.icon-video:before { content: "\f10e"; }
diff --git a/src/main/webapp/resources/css/structure.css b/src/main/webapp/resources/css/structure.css
index a0e81f2a8df..470c07d4534 100644
--- a/src/main/webapp/resources/css/structure.css
+++ b/src/main/webapp/resources/css/structure.css
@@ -67,7 +67,7 @@ tr.ui-state-highlight, .ui-widget-content tr.ui-state-highlight, .ui-widget-head
 tr.ui-state-highlight label, .ui-widget-content tr.ui-state-highlight label, .ui-widget-header tr.ui-state-highlight label {color: #333333;}
 tr.ui-state-highlight a:not(.btn), .ui-widget-content tr.ui-state-highlight a:not(.btn), .ui-widget-header tr.ui-state-highlight a:not(.btn) {color: #428bca;}
 tr.ui-state-highlight .ui-icon {
-    background-image: url("/javax.faces.resource/images/ui-icons_333333_256x240.png.xhtml?ln=primefaces-bootstrap");
+    background-image: url("/jakarta.faces.resource/images/ui-icons_333333_256x240.png.xhtml?ln=primefaces-bootstrap");
 }
 td.col-select-width, th.col-select-width {width:36px;}
 
@@ -578,7 +578,8 @@ div[id$='roleDisplay'] span.label, div[id$='roleDetails'] span.label {display:in
 #metrics-content {color:#333;background:#fff;padding:8px 10px;}
 /* -- NEW LAYOUT, DATASET + FILE -- */
 #metrics-block {border:0; margin-top:6px;}
-#metrics-body {border:1px solid #EEE;border-width:1px 0 0 0;padding:8px 0;}
+#metrics-heading-subtitle {font-weight:normal;font-size:smaller;}
+#metrics-body {border:1px solid #EEE;border-top-width:0;padding:8px 10px;}
 #metrics-block .metrics-count-block:not(:last-child) {margin-bottom:.5em;padding-bottom:.5em;border-bottom:1px solid #EEE}
 
 /* -------- SHARRRE -------- */
@@ -1048,6 +1049,10 @@ progress::-webkit-progress-value {
 
 /*External vocabulary support*/
 
+.select2-container {
+    display:block!important;
+}
+
 .select2-container .select2-selection--single {
         border:1px solid #cccccc;
         border-radius: 3px;
@@ -1058,7 +1063,8 @@ progress::-webkit-progress-value {
 
 .select2-container .select2-selection--single .select2-selection__rendered {
         display:inline-block!important;
-        max-width:90%
+        max-width:80%;
+        padding-right:5px!important;
 }
 .select2-selection__clear, .select2-selection__rendered {
         vertical-align: -webkit-baseline-middle;
@@ -1122,6 +1128,10 @@ padding-right:0px;
 a {
     color:#3174AF;
 }
+
+.label-warning a, .bg-citation a, .terms .help-block a, #panelCollapseTOU .help-block a {
+  text-decoration: underline;
+}
 .pagination>.disabled>a {
     color:#767676;
 }
@@ -1137,3 +1147,18 @@ span.label-default { background-color: #757575 }
 .ui-inplace .ui-inputfield {
     width:60%
 }
+/*File sorting*/
+#datasetForm .file-sort {
+	max-width: 255px;
+	float: right;
+	padding-right:21px;
+}
+
+#datasetForm .file-sort .btn-group {
+	padding-top: 4px;
+}
+
+#datasetForm .file-group-by {
+	min-width: 130px;
+	padding: 0px;
+}
diff --git a/src/main/webapp/resources/images/Robot-Icon_2.png b/src/main/webapp/resources/images/Robot-Icon_2.png
new file mode 100644
index 00000000000..42a5e01c406
Binary files /dev/null and b/src/main/webapp/resources/images/Robot-Icon_2.png differ
diff --git a/src/main/webapp/resources/js/crypto-js/4.0.0/core.js b/src/main/webapp/resources/js/crypto-js/4.0.0/core.js
new file mode 100644
index 00000000000..0ef552baa5b
--- /dev/null
+++ b/src/main/webapp/resources/js/crypto-js/4.0.0/core.js
@@ -0,0 +1,797 @@
+;(function (root, factory) {
+	if (typeof exports === "object") {
+		// CommonJS
+		module.exports = exports = factory();
+	}
+	else if (typeof define === "function" && define.amd) {
+		// AMD
+		define([], factory);
+	}
+	else {
+		// Global (browser)
+		root.CryptoJS = factory();
+	}
+}(this, function () {
+
+	/*globals window, global, require*/
+
+	/**
+	 * CryptoJS core components.
+	 */
+	var CryptoJS = CryptoJS || (function (Math, undefined) {
+
+	    var crypto;
+
+	    // Native crypto from window (Browser)
+	    if (typeof window !== 'undefined' && window.crypto) {
+	        crypto = window.crypto;
+	    }
+
+	    // Native (experimental IE 11) crypto from window (Browser)
+	    if (!crypto && typeof window !== 'undefined' && window.msCrypto) {
+	        crypto = window.msCrypto;
+	    }
+
+	    // Native crypto from global (NodeJS)
+	    if (!crypto && typeof global !== 'undefined' && global.crypto) {
+	        crypto = global.crypto;
+	    }
+
+	    // Native crypto import via require (NodeJS)
+	    if (!crypto && typeof require === 'function') {
+	        try {
+	            crypto = require('crypto');
+	        } catch (err) {}
+	    }
+
+	    /*
+	     * Cryptographically secure pseudorandom number generator
+	     *
+	     * Used because Math.random() is not cryptographically secure.
+	     */
+	    var cryptoSecureRandomInt = function () {
+	        if (crypto) {
+	            // Use getRandomValues method (Browser)
+	            if (typeof crypto.getRandomValues === 'function') {
+	                try {
+	                    return crypto.getRandomValues(new Uint32Array(1))[0];
+	                } catch (err) {}
+	            }
+
+	            // Use randomBytes method (NodeJS)
+	            if (typeof crypto.randomBytes === 'function') {
+	                try {
+	                    return crypto.randomBytes(4).readInt32LE();
+	                } catch (err) {}
+	            }
+	        }
+
+	        throw new Error('Native crypto module could not be used to get secure random number.');
+	    };
+
+	    /*
+	     * Local polyfill of Object.create
+
+	     */
+	    var create = Object.create || (function () {
+	        function F() {}
+
+	        return function (obj) {
+	            var subtype;
+
+	            F.prototype = obj;
+
+	            subtype = new F();
+
+	            F.prototype = null;
+
+	            return subtype;
+	        };
+	    }());
+
+	    /**
+	     * CryptoJS namespace.
+	     */
+	    var C = {};
+
+	    /**
+	     * Library namespace.
+	     */
+	    var C_lib = C.lib = {};
+
+	    /**
+	     * Base object for prototypal inheritance.
+	     */
+	    var Base = C_lib.Base = (function () {
+
+
+	        return {
+	            /**
+	             * Creates a new object that inherits from this object.
+	             *
+	             * @param {Object} overrides Properties to copy into the new object.
+	             *
+	             * @return {Object} The new object.
+	             *
+	             * @static
+	             *
+	             * @example
+	             *
+	             *     var MyType = CryptoJS.lib.Base.extend({
+	             *         field: 'value',
+	             *
+	             *         method: function () {
+	             *         }
+	             *     });
+	             */
+	            extend: function (overrides) {
+	                // Spawn
+	                var subtype = create(this);
+
+	                // Augment
+	                if (overrides) {
+	                    subtype.mixIn(overrides);
+	                }
+
+	                // Create default initializer
+	                if (!subtype.hasOwnProperty('init') || this.init === subtype.init) {
+	                    subtype.init = function () {
+	                        subtype.$super.init.apply(this, arguments);
+	                    };
+	                }
+
+	                // Initializer's prototype is the subtype object
+	                subtype.init.prototype = subtype;
+
+	                // Reference supertype
+	                subtype.$super = this;
+
+	                return subtype;
+	            },
+
+	            /**
+	             * Extends this object and runs the init method.
+	             * Arguments to create() will be passed to init().
+	             *
+	             * @return {Object} The new object.
+	             *
+	             * @static
+	             *
+	             * @example
+	             *
+	             *     var instance = MyType.create();
+	             */
+	            create: function () {
+	                var instance = this.extend();
+	                instance.init.apply(instance, arguments);
+
+	                return instance;
+	            },
+
+	            /**
+	             * Initializes a newly created object.
+	             * Override this method to add some logic when your objects are created.
+	             *
+	             * @example
+	             *
+	             *     var MyType = CryptoJS.lib.Base.extend({
+	             *         init: function () {
+	             *             // ...
+	             *         }
+	             *     });
+	             */
+	            init: function () {
+	            },
+
+	            /**
+	             * Copies properties into this object.
+	             *
+	             * @param {Object} properties The properties to mix in.
+	             *
+	             * @example
+	             *
+	             *     MyType.mixIn({
+	             *         field: 'value'
+	             *     });
+	             */
+	            mixIn: function (properties) {
+	                for (var propertyName in properties) {
+	                    if (properties.hasOwnProperty(propertyName)) {
+	                        this[propertyName] = properties[propertyName];
+	                    }
+	                }
+
+	                // IE won't copy toString using the loop above
+	                if (properties.hasOwnProperty('toString')) {
+	                    this.toString = properties.toString;
+	                }
+	            },
+
+	            /**
+	             * Creates a copy of this object.
+	             *
+	             * @return {Object} The clone.
+	             *
+	             * @example
+	             *
+	             *     var clone = instance.clone();
+	             */
+	            clone: function () {
+	                return this.init.prototype.extend(this);
+	            }
+	        };
+	    }());
+
+	    /**
+	     * An array of 32-bit words.
+	     *
+	     * @property {Array} words The array of 32-bit words.
+	     * @property {number} sigBytes The number of significant bytes in this word array.
+	     */
+	    var WordArray = C_lib.WordArray = Base.extend({
+	        /**
+	         * Initializes a newly created word array.
+	         *
+	         * @param {Array} words (Optional) An array of 32-bit words.
+	         * @param {number} sigBytes (Optional) The number of significant bytes in the words.
+	         *
+	         * @example
+	         *
+	         *     var wordArray = CryptoJS.lib.WordArray.create();
+	         *     var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]);
+	         *     var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6);
+	         */
+	        init: function (words, sigBytes) {
+	            words = this.words = words || [];
+
+	            if (sigBytes != undefined) {
+	                this.sigBytes = sigBytes;
+	            } else {
+	                this.sigBytes = words.length * 4;
+	            }
+	        },
+
+	        /**
+	         * Converts this word array to a string.
+	         *
+	         * @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex
+	         *
+	         * @return {string} The stringified word array.
+	         *
+	         * @example
+	         *
+	         *     var string = wordArray + '';
+	         *     var string = wordArray.toString();
+	         *     var string = wordArray.toString(CryptoJS.enc.Utf8);
+	         */
+	        toString: function (encoder) {
+	            return (encoder || Hex).stringify(this);
+	        },
+
+	        /**
+	         * Concatenates a word array to this word array.
+	         *
+	         * @param {WordArray} wordArray The word array to append.
+	         *
+	         * @return {WordArray} This word array.
+	         *
+	         * @example
+	         *
+	         *     wordArray1.concat(wordArray2);
+	         */
+	        concat: function (wordArray) {
+	            // Shortcuts
+	            var thisWords = this.words;
+	            var thatWords = wordArray.words;
+	            var thisSigBytes = this.sigBytes;
+	            var thatSigBytes = wordArray.sigBytes;
+
+	            // Clamp excess bits
+	            this.clamp();
+
+	            // Concat
+	            if (thisSigBytes % 4) {
+	                // Copy one byte at a time
+	                for (var i = 0; i < thatSigBytes; i++) {
+	                    var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
+	                    thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8);
+	                }
+	            } else {
+	                // Copy one word at a time
+	                for (var i = 0; i < thatSigBytes; i += 4) {
+	                    thisWords[(thisSigBytes + i) >>> 2] = thatWords[i >>> 2];
+	                }
+	            }
+	            this.sigBytes += thatSigBytes;
+
+	            // Chainable
+	            return this;
+	        },
+
+	        /**
+	         * Removes insignificant bits.
+	         *
+	         * @example
+	         *
+	         *     wordArray.clamp();
+	         */
+	        clamp: function () {
+	            // Shortcuts
+	            var words = this.words;
+	            var sigBytes = this.sigBytes;
+
+	            // Clamp
+	            words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8);
+	            words.length = Math.ceil(sigBytes / 4);
+	        },
+
+	        /**
+	         * Creates a copy of this word array.
+	         *
+	         * @return {WordArray} The clone.
+	         *
+	         * @example
+	         *
+	         *     var clone = wordArray.clone();
+	         */
+	        clone: function () {
+	            var clone = Base.clone.call(this);
+	            clone.words = this.words.slice(0);
+
+	            return clone;
+	        },
+
+	        /**
+	         * Creates a word array filled with random bytes.
+	         *
+	         * @param {number} nBytes The number of random bytes to generate.
+	         *
+	         * @return {WordArray} The random word array.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var wordArray = CryptoJS.lib.WordArray.random(16);
+	         */
+	        random: function (nBytes) {
+	            var words = [];
+
+	            for (var i = 0; i < nBytes; i += 4) {
+	                words.push(cryptoSecureRandomInt());
+	            }
+
+	            return new WordArray.init(words, nBytes);
+	        }
+	    });
+
+	    /**
+	     * Encoder namespace.
+	     */
+	    var C_enc = C.enc = {};
+
+	    /**
+	     * Hex encoding strategy.
+	     */
+	    var Hex = C_enc.Hex = {
+	        /**
+	         * Converts a word array to a hex string.
+	         *
+	         * @param {WordArray} wordArray The word array.
+	         *
+	         * @return {string} The hex string.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var hexString = CryptoJS.enc.Hex.stringify(wordArray);
+	         */
+	        stringify: function (wordArray) {
+	            // Shortcuts
+	            var words = wordArray.words;
+	            var sigBytes = wordArray.sigBytes;
+
+	            // Convert
+	            var hexChars = [];
+	            for (var i = 0; i < sigBytes; i++) {
+	                var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
+	                hexChars.push((bite >>> 4).toString(16));
+	                hexChars.push((bite & 0x0f).toString(16));
+	            }
+
+	            return hexChars.join('');
+	        },
+
+	        /**
+	         * Converts a hex string to a word array.
+	         *
+	         * @param {string} hexStr The hex string.
+	         *
+	         * @return {WordArray} The word array.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var wordArray = CryptoJS.enc.Hex.parse(hexString);
+	         */
+	        parse: function (hexStr) {
+	            // Shortcut
+	            var hexStrLength = hexStr.length;
+
+	            // Convert
+	            var words = [];
+	            for (var i = 0; i < hexStrLength; i += 2) {
+	                words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4);
+	            }
+
+	            return new WordArray.init(words, hexStrLength / 2);
+	        }
+	    };
+
+	    /**
+	     * Latin1 encoding strategy.
+	     */
+	    var Latin1 = C_enc.Latin1 = {
+	        /**
+	         * Converts a word array to a Latin1 string.
+	         *
+	         * @param {WordArray} wordArray The word array.
+	         *
+	         * @return {string} The Latin1 string.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var latin1String = CryptoJS.enc.Latin1.stringify(wordArray);
+	         */
+	        stringify: function (wordArray) {
+	            // Shortcuts
+	            var words = wordArray.words;
+	            var sigBytes = wordArray.sigBytes;
+
+	            // Convert
+	            var latin1Chars = [];
+	            for (var i = 0; i < sigBytes; i++) {
+	                var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
+	                latin1Chars.push(String.fromCharCode(bite));
+	            }
+
+	            return latin1Chars.join('');
+	        },
+
+	        /**
+	         * Converts a Latin1 string to a word array.
+	         *
+	         * @param {string} latin1Str The Latin1 string.
+	         *
+	         * @return {WordArray} The word array.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var wordArray = CryptoJS.enc.Latin1.parse(latin1String);
+	         */
+	        parse: function (latin1Str) {
+	            // Shortcut
+	            var latin1StrLength = latin1Str.length;
+
+	            // Convert
+	            var words = [];
+	            for (var i = 0; i < latin1StrLength; i++) {
+	                words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8);
+	            }
+
+	            return new WordArray.init(words, latin1StrLength);
+	        }
+	    };
+
+	    /**
+	     * UTF-8 encoding strategy.
+	     */
+	    var Utf8 = C_enc.Utf8 = {
+	        /**
+	         * Converts a word array to a UTF-8 string.
+	         *
+	         * @param {WordArray} wordArray The word array.
+	         *
+	         * @return {string} The UTF-8 string.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var utf8String = CryptoJS.enc.Utf8.stringify(wordArray);
+	         */
+	        stringify: function (wordArray) {
+	            try {
+	                return decodeURIComponent(escape(Latin1.stringify(wordArray)));
+	            } catch (e) {
+	                throw new Error('Malformed UTF-8 data');
+	            }
+	        },
+
+	        /**
+	         * Converts a UTF-8 string to a word array.
+	         *
+	         * @param {string} utf8Str The UTF-8 string.
+	         *
+	         * @return {WordArray} The word array.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var wordArray = CryptoJS.enc.Utf8.parse(utf8String);
+	         */
+	        parse: function (utf8Str) {
+	            return Latin1.parse(unescape(encodeURIComponent(utf8Str)));
+	        }
+	    };
+
+	    /**
+	     * Abstract buffered block algorithm template.
+	     *
+	     * The property blockSize must be implemented in a concrete subtype.
+	     *
+	     * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0
+	     */
+	    var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({
+	        /**
+	         * Resets this block algorithm's data buffer to its initial state.
+	         *
+	         * @example
+	         *
+	         *     bufferedBlockAlgorithm.reset();
+	         */
+	        reset: function () {
+	            // Initial values
+	            this._data = new WordArray.init();
+	            this._nDataBytes = 0;
+	        },
+
+	        /**
+	         * Adds new data to this block algorithm's buffer.
+	         *
+	         * @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8.
+	         *
+	         * @example
+	         *
+	         *     bufferedBlockAlgorithm._append('data');
+	         *     bufferedBlockAlgorithm._append(wordArray);
+	         */
+	        _append: function (data) {
+	            // Convert string to WordArray, else assume WordArray already
+	            if (typeof data == 'string') {
+	                data = Utf8.parse(data);
+	            }
+
+	            // Append
+	            this._data.concat(data);
+	            this._nDataBytes += data.sigBytes;
+	        },
+
+	        /**
+	         * Processes available data blocks.
+	         *
+	         * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype.
+	         *
+	         * @param {boolean} doFlush Whether all blocks and partial blocks should be processed.
+	         *
+	         * @return {WordArray} The processed data.
+	         *
+	         * @example
+	         *
+	         *     var processedData = bufferedBlockAlgorithm._process();
+	         *     var processedData = bufferedBlockAlgorithm._process(!!'flush');
+	         */
+	        _process: function (doFlush) {
+	            var processedWords;
+
+	            // Shortcuts
+	            var data = this._data;
+	            var dataWords = data.words;
+	            var dataSigBytes = data.sigBytes;
+	            var blockSize = this.blockSize;
+	            var blockSizeBytes = blockSize * 4;
+
+	            // Count blocks ready
+	            var nBlocksReady = dataSigBytes / blockSizeBytes;
+	            if (doFlush) {
+	                // Round up to include partial blocks
+	                nBlocksReady = Math.ceil(nBlocksReady);
+	            } else {
+	                // Round down to include only full blocks,
+	                // less the number of blocks that must remain in the buffer
+	                nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0);
+	            }
+
+	            // Count words ready
+	            var nWordsReady = nBlocksReady * blockSize;
+
+	            // Count bytes ready
+	            var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes);
+
+	            // Process blocks
+	            if (nWordsReady) {
+	                for (var offset = 0; offset < nWordsReady; offset += blockSize) {
+	                    // Perform concrete-algorithm logic
+	                    this._doProcessBlock(dataWords, offset);
+	                }
+
+	                // Remove processed words
+	                processedWords = dataWords.splice(0, nWordsReady);
+	                data.sigBytes -= nBytesReady;
+	            }
+
+	            // Return processed words
+	            return new WordArray.init(processedWords, nBytesReady);
+	        },
+
+	        /**
+	         * Creates a copy of this object.
+	         *
+	         * @return {Object} The clone.
+	         *
+	         * @example
+	         *
+	         *     var clone = bufferedBlockAlgorithm.clone();
+	         */
+	        clone: function () {
+	            var clone = Base.clone.call(this);
+	            clone._data = this._data.clone();
+
+	            return clone;
+	        },
+
+	        _minBufferSize: 0
+	    });
+
+	    /**
+	     * Abstract hasher template.
+	     *
+	     * @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits)
+	     */
+	    var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({
+	        /**
+	         * Configuration options.
+	         */
+	        cfg: Base.extend(),
+
+	        /**
+	         * Initializes a newly created hasher.
+	         *
+	         * @param {Object} cfg (Optional) The configuration options to use for this hash computation.
+	         *
+	         * @example
+	         *
+	         *     var hasher = CryptoJS.algo.SHA256.create();
+	         */
+	        init: function (cfg) {
+	            // Apply config defaults
+	            this.cfg = this.cfg.extend(cfg);
+
+	            // Set initial values
+	            this.reset();
+	        },
+
+	        /**
+	         * Resets this hasher to its initial state.
+	         *
+	         * @example
+	         *
+	         *     hasher.reset();
+	         */
+	        reset: function () {
+	            // Reset data buffer
+	            BufferedBlockAlgorithm.reset.call(this);
+
+	            // Perform concrete-hasher logic
+	            this._doReset();
+	        },
+
+	        /**
+	         * Updates this hasher with a message.
+	         *
+	         * @param {WordArray|string} messageUpdate The message to append.
+	         *
+	         * @return {Hasher} This hasher.
+	         *
+	         * @example
+	         *
+	         *     hasher.update('message');
+	         *     hasher.update(wordArray);
+	         */
+	        update: function (messageUpdate) {
+	            // Append
+	            this._append(messageUpdate);
+
+	            // Update the hash
+	            this._process();
+
+	            // Chainable
+	            return this;
+	        },
+
+	        /**
+	         * Finalizes the hash computation.
+	         * Note that the finalize operation is effectively a destructive, read-once operation.
+	         *
+	         * @param {WordArray|string} messageUpdate (Optional) A final message update.
+	         *
+	         * @return {WordArray} The hash.
+	         *
+	         * @example
+	         *
+	         *     var hash = hasher.finalize();
+	         *     var hash = hasher.finalize('message');
+	         *     var hash = hasher.finalize(wordArray);
+	         */
+	        finalize: function (messageUpdate) {
+	            // Final message update
+	            if (messageUpdate) {
+	                this._append(messageUpdate);
+	            }
+
+	            // Perform concrete-hasher logic
+	            var hash = this._doFinalize();
+
+	            return hash;
+	        },
+
+	        blockSize: 512/32,
+
+	        /**
+	         * Creates a shortcut function to a hasher's object interface.
+	         *
+	         * @param {Hasher} hasher The hasher to create a helper for.
+	         *
+	         * @return {Function} The shortcut function.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256);
+	         */
+	        _createHelper: function (hasher) {
+	            return function (message, cfg) {
+	                return new hasher.init(cfg).finalize(message);
+	            };
+	        },
+
+	        /**
+	         * Creates a shortcut function to the HMAC's object interface.
+	         *
+	         * @param {Hasher} hasher The hasher to use in this HMAC helper.
+	         *
+	         * @return {Function} The shortcut function.
+	         *
+	         * @static
+	         *
+	         * @example
+	         *
+	         *     var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256);
+	         */
+	        _createHmacHelper: function (hasher) {
+	            return function (message, key) {
+	                return new C_algo.HMAC.init(hasher, key).finalize(message);
+	            };
+	        }
+	    });
+
+	    /**
+	     * Algorithm namespace.
+	     */
+	    var C_algo = C.algo = {};
+
+	    return C;
+	}(Math));
+
+
+	return CryptoJS;
+
+}));
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/crypto-js/4.0.0/md5.js b/src/main/webapp/resources/js/crypto-js/4.0.0/md5.js
new file mode 100644
index 00000000000..12b0fdd4b4f
--- /dev/null
+++ b/src/main/webapp/resources/js/crypto-js/4.0.0/md5.js
@@ -0,0 +1,268 @@
+;(function (root, factory) {
+	if (typeof exports === "object") {
+		// CommonJS
+		module.exports = exports = factory(require("./core"));
+	}
+	else if (typeof define === "function" && define.amd) {
+		// AMD
+		define(["./core"], factory);
+	}
+	else {
+		// Global (browser)
+		factory(root.CryptoJS);
+	}
+}(this, function (CryptoJS) {
+
+	(function (Math) {
+	    // Shortcuts
+	    var C = CryptoJS;
+	    var C_lib = C.lib;
+	    var WordArray = C_lib.WordArray;
+	    var Hasher = C_lib.Hasher;
+	    var C_algo = C.algo;
+
+	    // Constants table
+	    var T = [];
+
+	    // Compute constants
+	    (function () {
+	        for (var i = 0; i < 64; i++) {
+	            T[i] = (Math.abs(Math.sin(i + 1)) * 0x100000000) | 0;
+	        }
+	    }());
+
+	    /**
+	     * MD5 hash algorithm.
+	     */
+	    var MD5 = C_algo.MD5 = Hasher.extend({
+	        _doReset: function () {
+	            this._hash = new WordArray.init([
+	                0x67452301, 0xefcdab89,
+	                0x98badcfe, 0x10325476
+	            ]);
+	        },
+
+	        _doProcessBlock: function (M, offset) {
+	            // Swap endian
+	            for (var i = 0; i < 16; i++) {
+	                // Shortcuts
+	                var offset_i = offset + i;
+	                var M_offset_i = M[offset_i];
+
+	                M[offset_i] = (
+	                    (((M_offset_i << 8)  | (M_offset_i >>> 24)) & 0x00ff00ff) |
+	                    (((M_offset_i << 24) | (M_offset_i >>> 8))  & 0xff00ff00)
+	                );
+	            }
+
+	            // Shortcuts
+	            var H = this._hash.words;
+
+	            var M_offset_0  = M[offset + 0];
+	            var M_offset_1  = M[offset + 1];
+	            var M_offset_2  = M[offset + 2];
+	            var M_offset_3  = M[offset + 3];
+	            var M_offset_4  = M[offset + 4];
+	            var M_offset_5  = M[offset + 5];
+	            var M_offset_6  = M[offset + 6];
+	            var M_offset_7  = M[offset + 7];
+	            var M_offset_8  = M[offset + 8];
+	            var M_offset_9  = M[offset + 9];
+	            var M_offset_10 = M[offset + 10];
+	            var M_offset_11 = M[offset + 11];
+	            var M_offset_12 = M[offset + 12];
+	            var M_offset_13 = M[offset + 13];
+	            var M_offset_14 = M[offset + 14];
+	            var M_offset_15 = M[offset + 15];
+
+	            // Working variables
+	            var a = H[0];
+	            var b = H[1];
+	            var c = H[2];
+	            var d = H[3];
+
+	            // Computation
+	            a = FF(a, b, c, d, M_offset_0,  7,  T[0]);
+	            d = FF(d, a, b, c, M_offset_1,  12, T[1]);
+	            c = FF(c, d, a, b, M_offset_2,  17, T[2]);
+	            b = FF(b, c, d, a, M_offset_3,  22, T[3]);
+	            a = FF(a, b, c, d, M_offset_4,  7,  T[4]);
+	            d = FF(d, a, b, c, M_offset_5,  12, T[5]);
+	            c = FF(c, d, a, b, M_offset_6,  17, T[6]);
+	            b = FF(b, c, d, a, M_offset_7,  22, T[7]);
+	            a = FF(a, b, c, d, M_offset_8,  7,  T[8]);
+	            d = FF(d, a, b, c, M_offset_9,  12, T[9]);
+	            c = FF(c, d, a, b, M_offset_10, 17, T[10]);
+	            b = FF(b, c, d, a, M_offset_11, 22, T[11]);
+	            a = FF(a, b, c, d, M_offset_12, 7,  T[12]);
+	            d = FF(d, a, b, c, M_offset_13, 12, T[13]);
+	            c = FF(c, d, a, b, M_offset_14, 17, T[14]);
+	            b = FF(b, c, d, a, M_offset_15, 22, T[15]);
+
+	            a = GG(a, b, c, d, M_offset_1,  5,  T[16]);
+	            d = GG(d, a, b, c, M_offset_6,  9,  T[17]);
+	            c = GG(c, d, a, b, M_offset_11, 14, T[18]);
+	            b = GG(b, c, d, a, M_offset_0,  20, T[19]);
+	            a = GG(a, b, c, d, M_offset_5,  5,  T[20]);
+	            d = GG(d, a, b, c, M_offset_10, 9,  T[21]);
+	            c = GG(c, d, a, b, M_offset_15, 14, T[22]);
+	            b = GG(b, c, d, a, M_offset_4,  20, T[23]);
+	            a = GG(a, b, c, d, M_offset_9,  5,  T[24]);
+	            d = GG(d, a, b, c, M_offset_14, 9,  T[25]);
+	            c = GG(c, d, a, b, M_offset_3,  14, T[26]);
+	            b = GG(b, c, d, a, M_offset_8,  20, T[27]);
+	            a = GG(a, b, c, d, M_offset_13, 5,  T[28]);
+	            d = GG(d, a, b, c, M_offset_2,  9,  T[29]);
+	            c = GG(c, d, a, b, M_offset_7,  14, T[30]);
+	            b = GG(b, c, d, a, M_offset_12, 20, T[31]);
+
+	            a = HH(a, b, c, d, M_offset_5,  4,  T[32]);
+	            d = HH(d, a, b, c, M_offset_8,  11, T[33]);
+	            c = HH(c, d, a, b, M_offset_11, 16, T[34]);
+	            b = HH(b, c, d, a, M_offset_14, 23, T[35]);
+	            a = HH(a, b, c, d, M_offset_1,  4,  T[36]);
+	            d = HH(d, a, b, c, M_offset_4,  11, T[37]);
+	            c = HH(c, d, a, b, M_offset_7,  16, T[38]);
+	            b = HH(b, c, d, a, M_offset_10, 23, T[39]);
+	            a = HH(a, b, c, d, M_offset_13, 4,  T[40]);
+	            d = HH(d, a, b, c, M_offset_0,  11, T[41]);
+	            c = HH(c, d, a, b, M_offset_3,  16, T[42]);
+	            b = HH(b, c, d, a, M_offset_6,  23, T[43]);
+	            a = HH(a, b, c, d, M_offset_9,  4,  T[44]);
+	            d = HH(d, a, b, c, M_offset_12, 11, T[45]);
+	            c = HH(c, d, a, b, M_offset_15, 16, T[46]);
+	            b = HH(b, c, d, a, M_offset_2,  23, T[47]);
+
+	            a = II(a, b, c, d, M_offset_0,  6,  T[48]);
+	            d = II(d, a, b, c, M_offset_7,  10, T[49]);
+	            c = II(c, d, a, b, M_offset_14, 15, T[50]);
+	            b = II(b, c, d, a, M_offset_5,  21, T[51]);
+	            a = II(a, b, c, d, M_offset_12, 6,  T[52]);
+	            d = II(d, a, b, c, M_offset_3,  10, T[53]);
+	            c = II(c, d, a, b, M_offset_10, 15, T[54]);
+	            b = II(b, c, d, a, M_offset_1,  21, T[55]);
+	            a = II(a, b, c, d, M_offset_8,  6,  T[56]);
+	            d = II(d, a, b, c, M_offset_15, 10, T[57]);
+	            c = II(c, d, a, b, M_offset_6,  15, T[58]);
+	            b = II(b, c, d, a, M_offset_13, 21, T[59]);
+	            a = II(a, b, c, d, M_offset_4,  6,  T[60]);
+	            d = II(d, a, b, c, M_offset_11, 10, T[61]);
+	            c = II(c, d, a, b, M_offset_2,  15, T[62]);
+	            b = II(b, c, d, a, M_offset_9,  21, T[63]);
+
+	            // Intermediate hash value
+	            H[0] = (H[0] + a) | 0;
+	            H[1] = (H[1] + b) | 0;
+	            H[2] = (H[2] + c) | 0;
+	            H[3] = (H[3] + d) | 0;
+	        },
+
+	        _doFinalize: function () {
+	            // Shortcuts
+	            var data = this._data;
+	            var dataWords = data.words;
+
+	            var nBitsTotal = this._nDataBytes * 8;
+	            var nBitsLeft = data.sigBytes * 8;
+
+	            // Add padding
+	            dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
+
+	            var nBitsTotalH = Math.floor(nBitsTotal / 0x100000000);
+	            var nBitsTotalL = nBitsTotal;
+	            dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = (
+	                (((nBitsTotalH << 8)  | (nBitsTotalH >>> 24)) & 0x00ff00ff) |
+	                (((nBitsTotalH << 24) | (nBitsTotalH >>> 8))  & 0xff00ff00)
+	            );
+	            dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = (
+	                (((nBitsTotalL << 8)  | (nBitsTotalL >>> 24)) & 0x00ff00ff) |
+	                (((nBitsTotalL << 24) | (nBitsTotalL >>> 8))  & 0xff00ff00)
+	            );
+
+	            data.sigBytes = (dataWords.length + 1) * 4;
+
+	            // Hash final blocks
+	            this._process();
+
+	            // Shortcuts
+	            var hash = this._hash;
+	            var H = hash.words;
+
+	            // Swap endian
+	            for (var i = 0; i < 4; i++) {
+	                // Shortcut
+	                var H_i = H[i];
+
+	                H[i] = (((H_i << 8)  | (H_i >>> 24)) & 0x00ff00ff) |
+	                       (((H_i << 24) | (H_i >>> 8))  & 0xff00ff00);
+	            }
+
+	            // Return final computed hash
+	            return hash;
+	        },
+
+	        clone: function () {
+	            var clone = Hasher.clone.call(this);
+	            clone._hash = this._hash.clone();
+
+	            return clone;
+	        }
+	    });
+
+	    function FF(a, b, c, d, x, s, t) {
+	        var n = a + ((b & c) | (~b & d)) + x + t;
+	        return ((n << s) | (n >>> (32 - s))) + b;
+	    }
+
+	    function GG(a, b, c, d, x, s, t) {
+	        var n = a + ((b & d) | (c & ~d)) + x + t;
+	        return ((n << s) | (n >>> (32 - s))) + b;
+	    }
+
+	    function HH(a, b, c, d, x, s, t) {
+	        var n = a + (b ^ c ^ d) + x + t;
+	        return ((n << s) | (n >>> (32 - s))) + b;
+	    }
+
+	    function II(a, b, c, d, x, s, t) {
+	        var n = a + (c ^ (b | ~d)) + x + t;
+	        return ((n << s) | (n >>> (32 - s))) + b;
+	    }
+
+	    /**
+	     * Shortcut function to the hasher's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     *
+	     * @return {WordArray} The hash.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hash = CryptoJS.MD5('message');
+	     *     var hash = CryptoJS.MD5(wordArray);
+	     */
+	    C.MD5 = Hasher._createHelper(MD5);
+
+	    /**
+	     * Shortcut function to the HMAC's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     * @param {WordArray|string} key The secret key.
+	     *
+	     * @return {WordArray} The HMAC.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hmac = CryptoJS.HmacMD5(message, key);
+	     */
+	    C.HmacMD5 = Hasher._createHmacHelper(MD5);
+	}(Math));
+
+
+	return CryptoJS.MD5;
+
+}));
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/crypto-js/4.0.0/sha1.js b/src/main/webapp/resources/js/crypto-js/4.0.0/sha1.js
new file mode 100644
index 00000000000..669114962a3
--- /dev/null
+++ b/src/main/webapp/resources/js/crypto-js/4.0.0/sha1.js
@@ -0,0 +1,150 @@
+;(function (root, factory) {
+	if (typeof exports === "object") {
+		// CommonJS
+		module.exports = exports = factory(require("./core"));
+	}
+	else if (typeof define === "function" && define.amd) {
+		// AMD
+		define(["./core"], factory);
+	}
+	else {
+		// Global (browser)
+		factory(root.CryptoJS);
+	}
+}(this, function (CryptoJS) {
+
+	(function () {
+	    // Shortcuts
+	    var C = CryptoJS;
+	    var C_lib = C.lib;
+	    var WordArray = C_lib.WordArray;
+	    var Hasher = C_lib.Hasher;
+	    var C_algo = C.algo;
+
+	    // Reusable object
+	    var W = [];
+
+	    /**
+	     * SHA-1 hash algorithm.
+	     */
+	    var SHA1 = C_algo.SHA1 = Hasher.extend({
+	        _doReset: function () {
+	            this._hash = new WordArray.init([
+	                0x67452301, 0xefcdab89,
+	                0x98badcfe, 0x10325476,
+	                0xc3d2e1f0
+	            ]);
+	        },
+
+	        _doProcessBlock: function (M, offset) {
+	            // Shortcut
+	            var H = this._hash.words;
+
+	            // Working variables
+	            var a = H[0];
+	            var b = H[1];
+	            var c = H[2];
+	            var d = H[3];
+	            var e = H[4];
+
+	            // Computation
+	            for (var i = 0; i < 80; i++) {
+	                if (i < 16) {
+	                    W[i] = M[offset + i] | 0;
+	                } else {
+	                    var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
+	                    W[i] = (n << 1) | (n >>> 31);
+	                }
+
+	                var t = ((a << 5) | (a >>> 27)) + e + W[i];
+	                if (i < 20) {
+	                    t += ((b & c) | (~b & d)) + 0x5a827999;
+	                } else if (i < 40) {
+	                    t += (b ^ c ^ d) + 0x6ed9eba1;
+	                } else if (i < 60) {
+	                    t += ((b & c) | (b & d) | (c & d)) - 0x70e44324;
+	                } else /* if (i < 80) */ {
+	                    t += (b ^ c ^ d) - 0x359d3e2a;
+	                }
+
+	                e = d;
+	                d = c;
+	                c = (b << 30) | (b >>> 2);
+	                b = a;
+	                a = t;
+	            }
+
+	            // Intermediate hash value
+	            H[0] = (H[0] + a) | 0;
+	            H[1] = (H[1] + b) | 0;
+	            H[2] = (H[2] + c) | 0;
+	            H[3] = (H[3] + d) | 0;
+	            H[4] = (H[4] + e) | 0;
+	        },
+
+	        _doFinalize: function () {
+	            // Shortcuts
+	            var data = this._data;
+	            var dataWords = data.words;
+
+	            var nBitsTotal = this._nDataBytes * 8;
+	            var nBitsLeft = data.sigBytes * 8;
+
+	            // Add padding
+	            dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
+	            dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000);
+	            dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;
+	            data.sigBytes = dataWords.length * 4;
+
+	            // Hash final blocks
+	            this._process();
+
+	            // Return final computed hash
+	            return this._hash;
+	        },
+
+	        clone: function () {
+	            var clone = Hasher.clone.call(this);
+	            clone._hash = this._hash.clone();
+
+	            return clone;
+	        }
+	    });
+
+	    /**
+	     * Shortcut function to the hasher's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     *
+	     * @return {WordArray} The hash.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hash = CryptoJS.SHA1('message');
+	     *     var hash = CryptoJS.SHA1(wordArray);
+	     */
+	    C.SHA1 = Hasher._createHelper(SHA1);
+
+	    /**
+	     * Shortcut function to the HMAC's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     * @param {WordArray|string} key The secret key.
+	     *
+	     * @return {WordArray} The HMAC.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hmac = CryptoJS.HmacSHA1(message, key);
+	     */
+	    C.HmacSHA1 = Hasher._createHmacHelper(SHA1);
+	}());
+
+
+	return CryptoJS.SHA1;
+
+}));
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/crypto-js/4.0.0/sha256.js b/src/main/webapp/resources/js/crypto-js/4.0.0/sha256.js
new file mode 100644
index 00000000000..de2d7fca101
--- /dev/null
+++ b/src/main/webapp/resources/js/crypto-js/4.0.0/sha256.js
@@ -0,0 +1,199 @@
+;(function (root, factory) {
+	if (typeof exports === "object") {
+		// CommonJS
+		module.exports = exports = factory(require("./core"));
+	}
+	else if (typeof define === "function" && define.amd) {
+		// AMD
+		define(["./core"], factory);
+	}
+	else {
+		// Global (browser)
+		factory(root.CryptoJS);
+	}
+}(this, function (CryptoJS) {
+
+	(function (Math) {
+	    // Shortcuts
+	    var C = CryptoJS;
+	    var C_lib = C.lib;
+	    var WordArray = C_lib.WordArray;
+	    var Hasher = C_lib.Hasher;
+	    var C_algo = C.algo;
+
+	    // Initialization and round constants tables
+	    var H = [];
+	    var K = [];
+
+	    // Compute constants
+	    (function () {
+	        function isPrime(n) {
+	            var sqrtN = Math.sqrt(n);
+	            for (var factor = 2; factor <= sqrtN; factor++) {
+	                if (!(n % factor)) {
+	                    return false;
+	                }
+	            }
+
+	            return true;
+	        }
+
+	        function getFractionalBits(n) {
+	            return ((n - (n | 0)) * 0x100000000) | 0;
+	        }
+
+	        var n = 2;
+	        var nPrime = 0;
+	        while (nPrime < 64) {
+	            if (isPrime(n)) {
+	                if (nPrime < 8) {
+	                    H[nPrime] = getFractionalBits(Math.pow(n, 1 / 2));
+	                }
+	                K[nPrime] = getFractionalBits(Math.pow(n, 1 / 3));
+
+	                nPrime++;
+	            }
+
+	            n++;
+	        }
+	    }());
+
+	    // Reusable object
+	    var W = [];
+
+	    /**
+	     * SHA-256 hash algorithm.
+	     */
+	    var SHA256 = C_algo.SHA256 = Hasher.extend({
+	        _doReset: function () {
+	            this._hash = new WordArray.init(H.slice(0));
+	        },
+
+	        _doProcessBlock: function (M, offset) {
+	            // Shortcut
+	            var H = this._hash.words;
+
+	            // Working variables
+	            var a = H[0];
+	            var b = H[1];
+	            var c = H[2];
+	            var d = H[3];
+	            var e = H[4];
+	            var f = H[5];
+	            var g = H[6];
+	            var h = H[7];
+
+	            // Computation
+	            for (var i = 0; i < 64; i++) {
+	                if (i < 16) {
+	                    W[i] = M[offset + i] | 0;
+	                } else {
+	                    var gamma0x = W[i - 15];
+	                    var gamma0  = ((gamma0x << 25) | (gamma0x >>> 7))  ^
+	                                  ((gamma0x << 14) | (gamma0x >>> 18)) ^
+	                                   (gamma0x >>> 3);
+
+	                    var gamma1x = W[i - 2];
+	                    var gamma1  = ((gamma1x << 15) | (gamma1x >>> 17)) ^
+	                                  ((gamma1x << 13) | (gamma1x >>> 19)) ^
+	                                   (gamma1x >>> 10);
+
+	                    W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16];
+	                }
+
+	                var ch  = (e & f) ^ (~e & g);
+	                var maj = (a & b) ^ (a & c) ^ (b & c);
+
+	                var sigma0 = ((a << 30) | (a >>> 2)) ^ ((a << 19) | (a >>> 13)) ^ ((a << 10) | (a >>> 22));
+	                var sigma1 = ((e << 26) | (e >>> 6)) ^ ((e << 21) | (e >>> 11)) ^ ((e << 7)  | (e >>> 25));
+
+	                var t1 = h + sigma1 + ch + K[i] + W[i];
+	                var t2 = sigma0 + maj;
+
+	                h = g;
+	                g = f;
+	                f = e;
+	                e = (d + t1) | 0;
+	                d = c;
+	                c = b;
+	                b = a;
+	                a = (t1 + t2) | 0;
+	            }
+
+	            // Intermediate hash value
+	            H[0] = (H[0] + a) | 0;
+	            H[1] = (H[1] + b) | 0;
+	            H[2] = (H[2] + c) | 0;
+	            H[3] = (H[3] + d) | 0;
+	            H[4] = (H[4] + e) | 0;
+	            H[5] = (H[5] + f) | 0;
+	            H[6] = (H[6] + g) | 0;
+	            H[7] = (H[7] + h) | 0;
+	        },
+
+	        _doFinalize: function () {
+	            // Shortcuts
+	            var data = this._data;
+	            var dataWords = data.words;
+
+	            var nBitsTotal = this._nDataBytes * 8;
+	            var nBitsLeft = data.sigBytes * 8;
+
+	            // Add padding
+	            dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
+	            dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000);
+	            dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal;
+	            data.sigBytes = dataWords.length * 4;
+
+	            // Hash final blocks
+	            this._process();
+
+	            // Return final computed hash
+	            return this._hash;
+	        },
+
+	        clone: function () {
+	            var clone = Hasher.clone.call(this);
+	            clone._hash = this._hash.clone();
+
+	            return clone;
+	        }
+	    });
+
+	    /**
+	     * Shortcut function to the hasher's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     *
+	     * @return {WordArray} The hash.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hash = CryptoJS.SHA256('message');
+	     *     var hash = CryptoJS.SHA256(wordArray);
+	     */
+	    C.SHA256 = Hasher._createHelper(SHA256);
+
+	    /**
+	     * Shortcut function to the HMAC's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     * @param {WordArray|string} key The secret key.
+	     *
+	     * @return {WordArray} The HMAC.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hmac = CryptoJS.HmacSHA256(message, key);
+	     */
+	    C.HmacSHA256 = Hasher._createHmacHelper(SHA256);
+	}(Math));
+
+
+	return CryptoJS.SHA256;
+
+}));
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/crypto-js/4.0.0/sha512.js b/src/main/webapp/resources/js/crypto-js/4.0.0/sha512.js
new file mode 100644
index 00000000000..d274ab0de38
--- /dev/null
+++ b/src/main/webapp/resources/js/crypto-js/4.0.0/sha512.js
@@ -0,0 +1,326 @@
+;(function (root, factory, undef) {
+	if (typeof exports === "object") {
+		// CommonJS
+		module.exports = exports = factory(require("./core"), require("./x64-core"));
+	}
+	else if (typeof define === "function" && define.amd) {
+		// AMD
+		define(["./core", "./x64-core"], factory);
+	}
+	else {
+		// Global (browser)
+		factory(root.CryptoJS);
+	}
+}(this, function (CryptoJS) {
+
+	(function () {
+	    // Shortcuts
+	    var C = CryptoJS;
+	    var C_lib = C.lib;
+	    var Hasher = C_lib.Hasher;
+	    var C_x64 = C.x64;
+	    var X64Word = C_x64.Word;
+	    var X64WordArray = C_x64.WordArray;
+	    var C_algo = C.algo;
+
+	    function X64Word_create() {
+	        return X64Word.create.apply(X64Word, arguments);
+	    }
+
+	    // Constants
+	    var K = [
+	        X64Word_create(0x428a2f98, 0xd728ae22), X64Word_create(0x71374491, 0x23ef65cd),
+	        X64Word_create(0xb5c0fbcf, 0xec4d3b2f), X64Word_create(0xe9b5dba5, 0x8189dbbc),
+	        X64Word_create(0x3956c25b, 0xf348b538), X64Word_create(0x59f111f1, 0xb605d019),
+	        X64Word_create(0x923f82a4, 0xaf194f9b), X64Word_create(0xab1c5ed5, 0xda6d8118),
+	        X64Word_create(0xd807aa98, 0xa3030242), X64Word_create(0x12835b01, 0x45706fbe),
+	        X64Word_create(0x243185be, 0x4ee4b28c), X64Word_create(0x550c7dc3, 0xd5ffb4e2),
+	        X64Word_create(0x72be5d74, 0xf27b896f), X64Word_create(0x80deb1fe, 0x3b1696b1),
+	        X64Word_create(0x9bdc06a7, 0x25c71235), X64Word_create(0xc19bf174, 0xcf692694),
+	        X64Word_create(0xe49b69c1, 0x9ef14ad2), X64Word_create(0xefbe4786, 0x384f25e3),
+	        X64Word_create(0x0fc19dc6, 0x8b8cd5b5), X64Word_create(0x240ca1cc, 0x77ac9c65),
+	        X64Word_create(0x2de92c6f, 0x592b0275), X64Word_create(0x4a7484aa, 0x6ea6e483),
+	        X64Word_create(0x5cb0a9dc, 0xbd41fbd4), X64Word_create(0x76f988da, 0x831153b5),
+	        X64Word_create(0x983e5152, 0xee66dfab), X64Word_create(0xa831c66d, 0x2db43210),
+	        X64Word_create(0xb00327c8, 0x98fb213f), X64Word_create(0xbf597fc7, 0xbeef0ee4),
+	        X64Word_create(0xc6e00bf3, 0x3da88fc2), X64Word_create(0xd5a79147, 0x930aa725),
+	        X64Word_create(0x06ca6351, 0xe003826f), X64Word_create(0x14292967, 0x0a0e6e70),
+	        X64Word_create(0x27b70a85, 0x46d22ffc), X64Word_create(0x2e1b2138, 0x5c26c926),
+	        X64Word_create(0x4d2c6dfc, 0x5ac42aed), X64Word_create(0x53380d13, 0x9d95b3df),
+	        X64Word_create(0x650a7354, 0x8baf63de), X64Word_create(0x766a0abb, 0x3c77b2a8),
+	        X64Word_create(0x81c2c92e, 0x47edaee6), X64Word_create(0x92722c85, 0x1482353b),
+	        X64Word_create(0xa2bfe8a1, 0x4cf10364), X64Word_create(0xa81a664b, 0xbc423001),
+	        X64Word_create(0xc24b8b70, 0xd0f89791), X64Word_create(0xc76c51a3, 0x0654be30),
+	        X64Word_create(0xd192e819, 0xd6ef5218), X64Word_create(0xd6990624, 0x5565a910),
+	        X64Word_create(0xf40e3585, 0x5771202a), X64Word_create(0x106aa070, 0x32bbd1b8),
+	        X64Word_create(0x19a4c116, 0xb8d2d0c8), X64Word_create(0x1e376c08, 0x5141ab53),
+	        X64Word_create(0x2748774c, 0xdf8eeb99), X64Word_create(0x34b0bcb5, 0xe19b48a8),
+	        X64Word_create(0x391c0cb3, 0xc5c95a63), X64Word_create(0x4ed8aa4a, 0xe3418acb),
+	        X64Word_create(0x5b9cca4f, 0x7763e373), X64Word_create(0x682e6ff3, 0xd6b2b8a3),
+	        X64Word_create(0x748f82ee, 0x5defb2fc), X64Word_create(0x78a5636f, 0x43172f60),
+	        X64Word_create(0x84c87814, 0xa1f0ab72), X64Word_create(0x8cc70208, 0x1a6439ec),
+	        X64Word_create(0x90befffa, 0x23631e28), X64Word_create(0xa4506ceb, 0xde82bde9),
+	        X64Word_create(0xbef9a3f7, 0xb2c67915), X64Word_create(0xc67178f2, 0xe372532b),
+	        X64Word_create(0xca273ece, 0xea26619c), X64Word_create(0xd186b8c7, 0x21c0c207),
+	        X64Word_create(0xeada7dd6, 0xcde0eb1e), X64Word_create(0xf57d4f7f, 0xee6ed178),
+	        X64Word_create(0x06f067aa, 0x72176fba), X64Word_create(0x0a637dc5, 0xa2c898a6),
+	        X64Word_create(0x113f9804, 0xbef90dae), X64Word_create(0x1b710b35, 0x131c471b),
+	        X64Word_create(0x28db77f5, 0x23047d84), X64Word_create(0x32caab7b, 0x40c72493),
+	        X64Word_create(0x3c9ebe0a, 0x15c9bebc), X64Word_create(0x431d67c4, 0x9c100d4c),
+	        X64Word_create(0x4cc5d4be, 0xcb3e42b6), X64Word_create(0x597f299c, 0xfc657e2a),
+	        X64Word_create(0x5fcb6fab, 0x3ad6faec), X64Word_create(0x6c44198c, 0x4a475817)
+	    ];
+
+	    // Reusable objects
+	    var W = [];
+	    (function () {
+	        for (var i = 0; i < 80; i++) {
+	            W[i] = X64Word_create();
+	        }
+	    }());
+
+	    /**
+	     * SHA-512 hash algorithm.
+	     */
+	    var SHA512 = C_algo.SHA512 = Hasher.extend({
+	        _doReset: function () {
+	            this._hash = new X64WordArray.init([
+	                new X64Word.init(0x6a09e667, 0xf3bcc908), new X64Word.init(0xbb67ae85, 0x84caa73b),
+	                new X64Word.init(0x3c6ef372, 0xfe94f82b), new X64Word.init(0xa54ff53a, 0x5f1d36f1),
+	                new X64Word.init(0x510e527f, 0xade682d1), new X64Word.init(0x9b05688c, 0x2b3e6c1f),
+	                new X64Word.init(0x1f83d9ab, 0xfb41bd6b), new X64Word.init(0x5be0cd19, 0x137e2179)
+	            ]);
+	        },
+
+	        _doProcessBlock: function (M, offset) {
+	            // Shortcuts
+	            var H = this._hash.words;
+
+	            var H0 = H[0];
+	            var H1 = H[1];
+	            var H2 = H[2];
+	            var H3 = H[3];
+	            var H4 = H[4];
+	            var H5 = H[5];
+	            var H6 = H[6];
+	            var H7 = H[7];
+
+	            var H0h = H0.high;
+	            var H0l = H0.low;
+	            var H1h = H1.high;
+	            var H1l = H1.low;
+	            var H2h = H2.high;
+	            var H2l = H2.low;
+	            var H3h = H3.high;
+	            var H3l = H3.low;
+	            var H4h = H4.high;
+	            var H4l = H4.low;
+	            var H5h = H5.high;
+	            var H5l = H5.low;
+	            var H6h = H6.high;
+	            var H6l = H6.low;
+	            var H7h = H7.high;
+	            var H7l = H7.low;
+
+	            // Working variables
+	            var ah = H0h;
+	            var al = H0l;
+	            var bh = H1h;
+	            var bl = H1l;
+	            var ch = H2h;
+	            var cl = H2l;
+	            var dh = H3h;
+	            var dl = H3l;
+	            var eh = H4h;
+	            var el = H4l;
+	            var fh = H5h;
+	            var fl = H5l;
+	            var gh = H6h;
+	            var gl = H6l;
+	            var hh = H7h;
+	            var hl = H7l;
+
+	            // Rounds
+	            for (var i = 0; i < 80; i++) {
+	                var Wil;
+	                var Wih;
+
+	                // Shortcut
+	                var Wi = W[i];
+
+	                // Extend message
+	                if (i < 16) {
+	                    Wih = Wi.high = M[offset + i * 2]     | 0;
+	                    Wil = Wi.low  = M[offset + i * 2 + 1] | 0;
+	                } else {
+	                    // Gamma0
+	                    var gamma0x  = W[i - 15];
+	                    var gamma0xh = gamma0x.high;
+	                    var gamma0xl = gamma0x.low;
+	                    var gamma0h  = ((gamma0xh >>> 1) | (gamma0xl << 31)) ^ ((gamma0xh >>> 8) | (gamma0xl << 24)) ^ (gamma0xh >>> 7);
+	                    var gamma0l  = ((gamma0xl >>> 1) | (gamma0xh << 31)) ^ ((gamma0xl >>> 8) | (gamma0xh << 24)) ^ ((gamma0xl >>> 7) | (gamma0xh << 25));
+
+	                    // Gamma1
+	                    var gamma1x  = W[i - 2];
+	                    var gamma1xh = gamma1x.high;
+	                    var gamma1xl = gamma1x.low;
+	                    var gamma1h  = ((gamma1xh >>> 19) | (gamma1xl << 13)) ^ ((gamma1xh << 3) | (gamma1xl >>> 29)) ^ (gamma1xh >>> 6);
+	                    var gamma1l  = ((gamma1xl >>> 19) | (gamma1xh << 13)) ^ ((gamma1xl << 3) | (gamma1xh >>> 29)) ^ ((gamma1xl >>> 6) | (gamma1xh << 26));
+
+	                    // W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16]
+	                    var Wi7  = W[i - 7];
+	                    var Wi7h = Wi7.high;
+	                    var Wi7l = Wi7.low;
+
+	                    var Wi16  = W[i - 16];
+	                    var Wi16h = Wi16.high;
+	                    var Wi16l = Wi16.low;
+
+	                    Wil = gamma0l + Wi7l;
+	                    Wih = gamma0h + Wi7h + ((Wil >>> 0) < (gamma0l >>> 0) ? 1 : 0);
+	                    Wil = Wil + gamma1l;
+	                    Wih = Wih + gamma1h + ((Wil >>> 0) < (gamma1l >>> 0) ? 1 : 0);
+	                    Wil = Wil + Wi16l;
+	                    Wih = Wih + Wi16h + ((Wil >>> 0) < (Wi16l >>> 0) ? 1 : 0);
+
+	                    Wi.high = Wih;
+	                    Wi.low  = Wil;
+	                }
+
+	                var chh  = (eh & fh) ^ (~eh & gh);
+	                var chl  = (el & fl) ^ (~el & gl);
+	                var majh = (ah & bh) ^ (ah & ch) ^ (bh & ch);
+	                var majl = (al & bl) ^ (al & cl) ^ (bl & cl);
+
+	                var sigma0h = ((ah >>> 28) | (al << 4))  ^ ((ah << 30)  | (al >>> 2)) ^ ((ah << 25) | (al >>> 7));
+	                var sigma0l = ((al >>> 28) | (ah << 4))  ^ ((al << 30)  | (ah >>> 2)) ^ ((al << 25) | (ah >>> 7));
+	                var sigma1h = ((eh >>> 14) | (el << 18)) ^ ((eh >>> 18) | (el << 14)) ^ ((eh << 23) | (el >>> 9));
+	                var sigma1l = ((el >>> 14) | (eh << 18)) ^ ((el >>> 18) | (eh << 14)) ^ ((el << 23) | (eh >>> 9));
+
+	                // t1 = h + sigma1 + ch + K[i] + W[i]
+	                var Ki  = K[i];
+	                var Kih = Ki.high;
+	                var Kil = Ki.low;
+
+	                var t1l = hl + sigma1l;
+	                var t1h = hh + sigma1h + ((t1l >>> 0) < (hl >>> 0) ? 1 : 0);
+	                var t1l = t1l + chl;
+	                var t1h = t1h + chh + ((t1l >>> 0) < (chl >>> 0) ? 1 : 0);
+	                var t1l = t1l + Kil;
+	                var t1h = t1h + Kih + ((t1l >>> 0) < (Kil >>> 0) ? 1 : 0);
+	                var t1l = t1l + Wil;
+	                var t1h = t1h + Wih + ((t1l >>> 0) < (Wil >>> 0) ? 1 : 0);
+
+	                // t2 = sigma0 + maj
+	                var t2l = sigma0l + majl;
+	                var t2h = sigma0h + majh + ((t2l >>> 0) < (sigma0l >>> 0) ? 1 : 0);
+
+	                // Update working variables
+	                hh = gh;
+	                hl = gl;
+	                gh = fh;
+	                gl = fl;
+	                fh = eh;
+	                fl = el;
+	                el = (dl + t1l) | 0;
+	                eh = (dh + t1h + ((el >>> 0) < (dl >>> 0) ? 1 : 0)) | 0;
+	                dh = ch;
+	                dl = cl;
+	                ch = bh;
+	                cl = bl;
+	                bh = ah;
+	                bl = al;
+	                al = (t1l + t2l) | 0;
+	                ah = (t1h + t2h + ((al >>> 0) < (t1l >>> 0) ? 1 : 0)) | 0;
+	            }
+
+	            // Intermediate hash value
+	            H0l = H0.low  = (H0l + al);
+	            H0.high = (H0h + ah + ((H0l >>> 0) < (al >>> 0) ? 1 : 0));
+	            H1l = H1.low  = (H1l + bl);
+	            H1.high = (H1h + bh + ((H1l >>> 0) < (bl >>> 0) ? 1 : 0));
+	            H2l = H2.low  = (H2l + cl);
+	            H2.high = (H2h + ch + ((H2l >>> 0) < (cl >>> 0) ? 1 : 0));
+	            H3l = H3.low  = (H3l + dl);
+	            H3.high = (H3h + dh + ((H3l >>> 0) < (dl >>> 0) ? 1 : 0));
+	            H4l = H4.low  = (H4l + el);
+	            H4.high = (H4h + eh + ((H4l >>> 0) < (el >>> 0) ? 1 : 0));
+	            H5l = H5.low  = (H5l + fl);
+	            H5.high = (H5h + fh + ((H5l >>> 0) < (fl >>> 0) ? 1 : 0));
+	            H6l = H6.low  = (H6l + gl);
+	            H6.high = (H6h + gh + ((H6l >>> 0) < (gl >>> 0) ? 1 : 0));
+	            H7l = H7.low  = (H7l + hl);
+	            H7.high = (H7h + hh + ((H7l >>> 0) < (hl >>> 0) ? 1 : 0));
+	        },
+
+	        _doFinalize: function () {
+	            // Shortcuts
+	            var data = this._data;
+	            var dataWords = data.words;
+
+	            var nBitsTotal = this._nDataBytes * 8;
+	            var nBitsLeft = data.sigBytes * 8;
+
+	            // Add padding
+	            dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
+	            dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 30] = Math.floor(nBitsTotal / 0x100000000);
+	            dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 31] = nBitsTotal;
+	            data.sigBytes = dataWords.length * 4;
+
+	            // Hash final blocks
+	            this._process();
+
+	            // Convert hash to 32-bit word array before returning
+	            var hash = this._hash.toX32();
+
+	            // Return final computed hash
+	            return hash;
+	        },
+
+	        clone: function () {
+	            var clone = Hasher.clone.call(this);
+	            clone._hash = this._hash.clone();
+
+	            return clone;
+	        },
+
+	        blockSize: 1024/32
+	    });
+
+	    /**
+	     * Shortcut function to the hasher's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     *
+	     * @return {WordArray} The hash.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hash = CryptoJS.SHA512('message');
+	     *     var hash = CryptoJS.SHA512(wordArray);
+	     */
+	    C.SHA512 = Hasher._createHelper(SHA512);
+
+	    /**
+	     * Shortcut function to the HMAC's object interface.
+	     *
+	     * @param {WordArray|string} message The message to hash.
+	     * @param {WordArray|string} key The secret key.
+	     *
+	     * @return {WordArray} The HMAC.
+	     *
+	     * @static
+	     *
+	     * @example
+	     *
+	     *     var hmac = CryptoJS.HmacSHA512(message, key);
+	     */
+	    C.HmacSHA512 = Hasher._createHmacHelper(SHA512);
+	}());
+
+
+	return CryptoJS.SHA512;
+
+}));
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/crypto-js/4.0.0/x64-core.js b/src/main/webapp/resources/js/crypto-js/4.0.0/x64-core.js
new file mode 100644
index 00000000000..57dcc144b04
--- /dev/null
+++ b/src/main/webapp/resources/js/crypto-js/4.0.0/x64-core.js
@@ -0,0 +1,304 @@
+;(function (root, factory) {
+	if (typeof exports === "object") {
+		// CommonJS
+		module.exports = exports = factory(require("./core"));
+	}
+	else if (typeof define === "function" && define.amd) {
+		// AMD
+		define(["./core"], factory);
+	}
+	else {
+		// Global (browser)
+		factory(root.CryptoJS);
+	}
+}(this, function (CryptoJS) {
+
+	(function (undefined) {
+	    // Shortcuts
+	    var C = CryptoJS;
+	    var C_lib = C.lib;
+	    var Base = C_lib.Base;
+	    var X32WordArray = C_lib.WordArray;
+
+	    /**
+	     * x64 namespace.
+	     */
+	    var C_x64 = C.x64 = {};
+
+	    /**
+	     * A 64-bit word.
+	     */
+	    var X64Word = C_x64.Word = Base.extend({
+	        /**
+	         * Initializes a newly created 64-bit word.
+	         *
+	         * @param {number} high The high 32 bits.
+	         * @param {number} low The low 32 bits.
+	         *
+	         * @example
+	         *
+	         *     var x64Word = CryptoJS.x64.Word.create(0x00010203, 0x04050607);
+	         */
+	        init: function (high, low) {
+	            this.high = high;
+	            this.low = low;
+	        }
+
+	        /**
+	         * Bitwise NOTs this word.
+	         *
+	         * @return {X64Word} A new x64-Word object after negating.
+	         *
+	         * @example
+	         *
+	         *     var negated = x64Word.not();
+	         */
+	        // not: function () {
+	            // var high = ~this.high;
+	            // var low = ~this.low;
+
+	            // return X64Word.create(high, low);
+	        // },
+
+	        /**
+	         * Bitwise ANDs this word with the passed word.
+	         *
+	         * @param {X64Word} word The x64-Word to AND with this word.
+	         *
+	         * @return {X64Word} A new x64-Word object after ANDing.
+	         *
+	         * @example
+	         *
+	         *     var anded = x64Word.and(anotherX64Word);
+	         */
+	        // and: function (word) {
+	            // var high = this.high & word.high;
+	            // var low = this.low & word.low;
+
+	            // return X64Word.create(high, low);
+	        // },
+
+	        /**
+	         * Bitwise ORs this word with the passed word.
+	         *
+	         * @param {X64Word} word The x64-Word to OR with this word.
+	         *
+	         * @return {X64Word} A new x64-Word object after ORing.
+	         *
+	         * @example
+	         *
+	         *     var ored = x64Word.or(anotherX64Word);
+	         */
+	        // or: function (word) {
+	            // var high = this.high | word.high;
+	            // var low = this.low | word.low;
+
+	            // return X64Word.create(high, low);
+	        // },
+
+	        /**
+	         * Bitwise XORs this word with the passed word.
+	         *
+	         * @param {X64Word} word The x64-Word to XOR with this word.
+	         *
+	         * @return {X64Word} A new x64-Word object after XORing.
+	         *
+	         * @example
+	         *
+	         *     var xored = x64Word.xor(anotherX64Word);
+	         */
+	        // xor: function (word) {
+	            // var high = this.high ^ word.high;
+	            // var low = this.low ^ word.low;
+
+	            // return X64Word.create(high, low);
+	        // },
+
+	        /**
+	         * Shifts this word n bits to the left.
+	         *
+	         * @param {number} n The number of bits to shift.
+	         *
+	         * @return {X64Word} A new x64-Word object after shifting.
+	         *
+	         * @example
+	         *
+	         *     var shifted = x64Word.shiftL(25);
+	         */
+	        // shiftL: function (n) {
+	            // if (n < 32) {
+	                // var high = (this.high << n) | (this.low >>> (32 - n));
+	                // var low = this.low << n;
+	            // } else {
+	                // var high = this.low << (n - 32);
+	                // var low = 0;
+	            // }
+
+	            // return X64Word.create(high, low);
+	        // },
+
+	        /**
+	         * Shifts this word n bits to the right.
+	         *
+	         * @param {number} n The number of bits to shift.
+	         *
+	         * @return {X64Word} A new x64-Word object after shifting.
+	         *
+	         * @example
+	         *
+	         *     var shifted = x64Word.shiftR(7);
+	         */
+	        // shiftR: function (n) {
+	            // if (n < 32) {
+	                // var low = (this.low >>> n) | (this.high << (32 - n));
+	                // var high = this.high >>> n;
+	            // } else {
+	                // var low = this.high >>> (n - 32);
+	                // var high = 0;
+	            // }
+
+	            // return X64Word.create(high, low);
+	        // },
+
+	        /**
+	         * Rotates this word n bits to the left.
+	         *
+	         * @param {number} n The number of bits to rotate.
+	         *
+	         * @return {X64Word} A new x64-Word object after rotating.
+	         *
+	         * @example
+	         *
+	         *     var rotated = x64Word.rotL(25);
+	         */
+	        // rotL: function (n) {
+	            // return this.shiftL(n).or(this.shiftR(64 - n));
+	        // },
+
+	        /**
+	         * Rotates this word n bits to the right.
+	         *
+	         * @param {number} n The number of bits to rotate.
+	         *
+	         * @return {X64Word} A new x64-Word object after rotating.
+	         *
+	         * @example
+	         *
+	         *     var rotated = x64Word.rotR(7);
+	         */
+	        // rotR: function (n) {
+	            // return this.shiftR(n).or(this.shiftL(64 - n));
+	        // },
+
+	        /**
+	         * Adds this word with the passed word.
+	         *
+	         * @param {X64Word} word The x64-Word to add with this word.
+	         *
+	         * @return {X64Word} A new x64-Word object after adding.
+	         *
+	         * @example
+	         *
+	         *     var added = x64Word.add(anotherX64Word);
+	         */
+	        // add: function (word) {
+	            // var low = (this.low + word.low) | 0;
+	            // var carry = (low >>> 0) < (this.low >>> 0) ? 1 : 0;
+	            // var high = (this.high + word.high + carry) | 0;
+
+	            // return X64Word.create(high, low);
+	        // }
+	    });
+
+	    /**
+	     * An array of 64-bit words.
+	     *
+	     * @property {Array} words The array of CryptoJS.x64.Word objects.
+	     * @property {number} sigBytes The number of significant bytes in this word array.
+	     */
+	    var X64WordArray = C_x64.WordArray = Base.extend({
+	        /**
+	         * Initializes a newly created word array.
+	         *
+	         * @param {Array} words (Optional) An array of CryptoJS.x64.Word objects.
+	         * @param {number} sigBytes (Optional) The number of significant bytes in the words.
+	         *
+	         * @example
+	         *
+	         *     var wordArray = CryptoJS.x64.WordArray.create();
+	         *
+	         *     var wordArray = CryptoJS.x64.WordArray.create([
+	         *         CryptoJS.x64.Word.create(0x00010203, 0x04050607),
+	         *         CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f)
+	         *     ]);
+	         *
+	         *     var wordArray = CryptoJS.x64.WordArray.create([
+	         *         CryptoJS.x64.Word.create(0x00010203, 0x04050607),
+	         *         CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f)
+	         *     ], 10);
+	         */
+	        init: function (words, sigBytes) {
+	            words = this.words = words || [];
+
+	            if (sigBytes != undefined) {
+	                this.sigBytes = sigBytes;
+	            } else {
+	                this.sigBytes = words.length * 8;
+	            }
+	        },
+
+	        /**
+	         * Converts this 64-bit word array to a 32-bit word array.
+	         *
+	         * @return {CryptoJS.lib.WordArray} This word array's data as a 32-bit word array.
+	         *
+	         * @example
+	         *
+	         *     var x32WordArray = x64WordArray.toX32();
+	         */
+	        toX32: function () {
+	            // Shortcuts
+	            var x64Words = this.words;
+	            var x64WordsLength = x64Words.length;
+
+	            // Convert
+	            var x32Words = [];
+	            for (var i = 0; i < x64WordsLength; i++) {
+	                var x64Word = x64Words[i];
+	                x32Words.push(x64Word.high);
+	                x32Words.push(x64Word.low);
+	            }
+
+	            return X32WordArray.create(x32Words, this.sigBytes);
+	        },
+
+	        /**
+	         * Creates a copy of this word array.
+	         *
+	         * @return {X64WordArray} The clone.
+	         *
+	         * @example
+	         *
+	         *     var clone = x64WordArray.clone();
+	         */
+	        clone: function () {
+	            var clone = Base.clone.call(this);
+
+	            // Clone "words" array
+	            var words = clone.words = this.words.slice(0);
+
+	            // Clone each X64Word object
+	            var wordsLength = words.length;
+	            for (var i = 0; i < wordsLength; i++) {
+	                words[i] = words[i].clone();
+	            }
+
+	            return clone;
+	        }
+	    });
+	}());
+
+
+	return CryptoJS;
+
+}));
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/fileupload.js b/src/main/webapp/resources/js/fileupload.js
index a478235c09f..08d6956b62c 100644
--- a/src/main/webapp/resources/js/fileupload.js
+++ b/src/main/webapp/resources/js/fileupload.js
@@ -1,645 +1,689 @@
-var fileList = [];
-var observer2 = null;
-var numDone = 0;
-var delay = 100; //milliseconds
-var UploadState = {
-        QUEUED: 'queued',
-        REQUESTING: 'requesting',
-        UPLOADING: 'uploading',
-        UPLOADED: 'uploaded',
-        HASHED: 'hashed',
-        FINISHED: 'finished',
-        FAILED: 'failed'
-}
-
-//true indicates direct upload is being used, but cancel may set it back to false at which point direct upload functions should not do further work
-var directUploadEnabled = false;
-
-var directUploadReport = true;
-
-//How many files have started being processed but aren't yet being uploaded
-var filesInProgress = 0;
-//The # of the current file being processed (total number of files for which upload has at least started)
-var curFile = 0;
-//The number of upload ids that have been assigned in the files table
-var getUpId = (function() {
-        var counter = -1;
-        return function() { counter += 1; return counter }
-})();
-//How many files are completely done
-var finishFile = (function() {
-        var counter = 0;
-        return function() { counter += 1; return counter }
-})();
-
-
-function setupDirectUpload(enabled) {
-        if (enabled) {
-                directUploadEnabled = true;
-                //An indicator as to which version is being used - should keep updated.
-                console.log('Dataverse Direct Upload for v5.0');
-                $('.ui-fileupload-upload').hide();
-                $('.ui-fileupload-cancel').hide();
-                //Catch files entered via upload dialog box. Since this 'select' widget is replaced by PF, we need to add a listener again when it is replaced
-                var fileInput = document.getElementById('datasetForm:fileUpload_input');
-                if (fileInput !== null) {
-                        fileInput.addEventListener('change', function(event) {
-                                fileList = [];
-                                for (var i = 0; i < fileInput.files.length; i++) {
-                                        queueFileForDirectUpload(fileInput.files[i]);
-                                }
-                        }, { once: false });
-                }
-                //Add support for drag and drop. Since the fileUploadForm is not replaced by PF, catching changes with a mutationobserver isn't needed
-                var fileDropWidget = document.getElementById('datasetForm:fileUpload');
-                fileDropWidget.addEventListener('drop', function(event) {
-                        fileList = [];
-                        for (var i = 0; i < event.dataTransfer.files.length; i++) {
-                                queueFileForDirectUpload(event.dataTransfer.files[i]);
-                        }
-                }, { once: false });
-
-                var config = { childList: true };
-                var callback = function(mutations) {
-                        mutations.forEach(function(mutation) {
-                                for (i = 0; i < mutation.addedNodes.length; i++) {
-                                        //Add a listener on any replacement file 'select' widget
-                                        if (mutation.addedNodes[i].id == 'datasetForm:fileUpload_input') {
-                                                fileInput = mutation.addedNodes[i];
-                                                mutation.addedNodes[i].addEventListener('change', function(event) {
-                                                        for (var j = 0; j < mutation.addedNodes[i].files.length; j++) {
-                                                                queueFileForDirectUpload(mutation.addedNodes[i].files[j]);
-                                                        }
-                                                }, { once: false });
-                                        }
-                                }
-                        });
-                };
-                if (observer2 != null) {
-                        observer2.disconnect();
-                }
-                observer2 = new MutationObserver(callback);
-                observer2.observe(document.getElementById('datasetForm:fileUpload'), config);
-        } //else ?
-}
-
-function sleep(ms) {
-        return new Promise(resolve => setTimeout(resolve, ms));
-}
-
-async function cancelDatasetCreate() {
-        //Page is going away - don't upload any more files, finish reporting current uploads, and then call cancelCreateCommand to clean up temp files
-        if (directUploadEnabled) {
-                fileList = [];
-                directUploadEnabled = false;
-                directUploadReport = false;
-                while (curFile != numDone) {
-                        $("#cancelCreate").prop('onclick', null).text("Cancel In Progress...").prop('disabled', true);
-                        $("#datasetForm\\:save").prop('disabled', true);
-                        await sleep(1000);
-                }
-                cancelCreateCommand();
-        } else {
-                cancelCreateCommand();
-        }
-}
-
-
-async function cancelDatasetEdit() {
-        //Don't upload any more files and don't send any more file entries to Dataverse, report any direct upload files that didn't get handled
-        if (directUploadEnabled) {
-                fileList = [];
-                directUploadEnabled = false;
-                directUploadReport = false;
-                        while (curFile != numDone) {
-                        $("#doneFilesButtonnop").prop('onclick', null).text("Cancel In Progress...").prop('disabled', true);
-                        await sleep(1000);
-                }
-                cancelEditCommand();
-        } else {
-                cancelEditCommand();
-        }
-}
-
-
-var inDataverseCall = false;
-
-var fileUpload = class fileUploadClass {
-        constructor(file) {
-                this.file = file;
-                this.state = UploadState.QUEUED;
-        }
-        async startRequestForDirectUploadUrl() {
-                this.state=UploadState.REQUESTING;
-                //Wait for each call to finish and update the DOM
-                while (inDataverseCall === true) {
-                        await sleep(delay);
-                }
-                inDataverseCall = true;
-                //storageId is not the location - has a : separator and no path elements from dataset
-                //(String uploadComponentId, String fullStorageIdentifier, String fileName, String contentType, String checksumType, String checksumValue)
-                requestDirectUploadUrls([{ name: 'fileSize', value: this.file.size }]);
-        }
-
-        async doUpload() {
-                this.state = UploadState.UPLOADING;
-                var thisFile = curFile-1;
-                this.id=thisFile;
-                //This appears to be the earliest point when the file table has been populated, and, since we don't know how many table entries have had ids added already, we check
-                var filerows = $('.ui-fileupload-files .ui-fileupload-row');
-                //Add an id attribute to each entry so we can later match progress and errors with the right entry
-                for (i = 0; i < filerows.length; i++) {
-                        var upid = filerows[i].getAttribute('upid');
-                        if (typeof upid === "undefined" || upid === null || upid === '') {
-                                var newUpId = getUpId();
-                                filerows[i].setAttribute('upid', newUpId);
-                        }
-                }
-                //Get the list of files to upload
-                var files = $('.ui-fileupload-files');
-                //Find the corresponding row (assumes that the file order and the order of rows is the same)
-                var fileNode = files.find("[upid='" + thisFile + "']");
-                //Decrement number queued for processing
-                filesInProgress = filesInProgress - 1;
-                var progBar = fileNode.find('.ui-fileupload-progress');
-                var cancelButton = fileNode.find('.ui-fileupload-cancel');
-                 var cancelled=false;
-                  $(cancelButton).click(function() {cancelled=true});
-                progBar.html('');
-                progBar.append($('<progress/>').attr('class', 'ui-progressbar ui-widget ui-widget-content ui-corner-all'));
-                if(this.urls.hasOwnProperty("url")) {
-                $.ajax({
-                        url: this.urls.url,
-                        headers: { "x-amz-tagging": "dv-state=temp" },
-                        type: 'PUT',
-                        data: this.file,
-                        context:this,
-                        cache: false,
-                        processData: false,
-                        success: function() {
-                                //ToDo - cancelling abandons the file. It is marked as temp so can be cleaned up later, but would be good to remove now (requires either sending a presigned delete URL or adding a callback to delete only a temp file
-                                if(!cancelled) {
-                                    this.reportUpload();
-                                }
-                        },
-                        error: function(jqXHR, textStatus, errorThrown) {
-                                console.log('Failure: ' + jqXHR.status);
-                                console.log('Failure: ' + errorThrown);
-                                uploadFailure(jqXHR, thisFile);
-                        },
-                        xhr: function() {
-                                var myXhr = $.ajaxSettings.xhr();
-                                if (myXhr.upload) {
-                                        myXhr.upload.addEventListener('progress', function(e) {
-                                                if (e.lengthComputable) {
-                                                        var doublelength = 2 * e.total;
-                                                        progBar.children('progress').attr({
-                                                                value: e.loaded,
-                                                                max: doublelength
-                                                        });
-                                                }
-                                        });
-                                }
-                                return myXhr;
-                        }
-                });
-                } else {
-                  var loaded=[];
-                  this.etags=[];
-                  this.numEtags=0;
-                  var doublelength = 2* this.file.size;
-                  var partSize= this.urls.partSize;
-                  var started=0;
-                  console.log('Num parts: ' + Object.keys(this.urls.urls).length);
-                  loaded[thisFile]=[];
-                  for (const [key, value] of Object.entries(this.urls.urls)) {
-                    if(!directUploadEnabled || cancelled) {
-                      //Direct upload has been cancelled - quit uploading new parts and abort this mp upload
-                      //once the parts in progress are done
-                      while((started-this.numEtags)>0) {
-                        await sleep(delay);
-                      }
-                      this.cancelMPUpload();
-                      directUploadFinished();
-                      break;
-                    }
-                    started=started+1;
-                    //Don't queue more than 10 parts at a time
-                    while((started-this.numEtags)>10) {
-                      await sleep(delay);
-                    }
-                    if(typeof this.etags[key] == 'undefined' || this.etags[key]==-1) {
-                       this.etags[key]=-1;
-                       var size = Math.min(partSize, this.file.size-(key-1)*partSize);
-                       var offset=(key-1)*partSize;
-                       var blob=this.file.slice(offset, offset + size);
-                       $.ajax({
-                        url: value,
-  //                      headers: { "x-amz-tagging": "dv-state=temp" },
-                        type: 'PUT',
-                        data: blob,
-                        context:this,
-                        cache: false,
-                        processData: false,
-                        success: function(data, status, response) {
-                                console.log('Successful upload of part ' + key + ' of ' + Object.keys(this.urls.urls).length);
-                                //The header has quotes around the eTag
-                                this.etags[key]=response.getResponseHeader('ETag').replace(/["]+/g, '');
-                                this.numEtags = this.numEtags+1;
-                                if(this.numEtags == Object.keys(this.urls.urls).length) {
-                                  this.multipartComplete();
-                                }
-                        },
-                        error: function(jqXHR, textStatus, errorThrown) {
-                                console.log('Failure: ' + jqXHR.status);
-                                console.log('Failure: ' + errorThrown);
-                                console.log(thisFile + ' : part' + key);
-                                this.numEtags = this.numEtags+1;
-                                this.etags[key]=-1;
-                                if(this.numEtags == Object.keys(this.urls.urls).length) {
-                                  this.multipartComplete();
-                                }
-                        },
-                        xhr: function() {
-                                var myXhr = $.ajaxSettings.xhr();
-                                if (myXhr.upload) {
-                                        myXhr.upload.addEventListener('progress', function(e) {
-                                                if (e.lengthComputable) {
-                                                        loaded[thisFile][key-1]=e.loaded;
-                                                        var total=0;
-                                                        for(let val of loaded[thisFile].values()) {
-                                                          //if parts with lower keys haven't reported yet, there could be undefined values in the array = skip those
-                                                          if(typeof val !== 'undefined') {
-                                                            total = total+val;
-                                                          }
-                                                        }
-                                                        progBar.children('progress').attr({
-                                                                value: total ,
-                                                                max: doublelength
-                                                        });
-                                                }
-                                        });
-                                }
-                                return myXhr;
-                        }
-                });
-              }
-            }
-          }
-        }
-
-        //All of the multipart part uploads have succeeded or failed. Here we decide whether to finish, retry, or cancel/abort 
-        multipartComplete() {
-          console.log('reporting file ' + this.file.name);
-          var allGood=true;
-          //Safety check - verify that all eTags were set
-          for(val in this.etags.values()) {
-            if (val==-1) {
-              allGood=false;
-              break;
-            }
-          }
-          if(!allGood) {
-            if(this.alreadyRetried) {
-              console.log('Error after retrying ' + this.file.name);
-              uploadFailure(jqXHR, thisFile);
-              this.cancelMPUpload();
-            } else {
-              this.alreadyRetried=true;
-              this.doUpload();
-            }
-          } else {
-            this.finishMPUpload();
-          }
-        }
-        
-        reportUpload() {
-                this.state = UploadState.UPLOADED;
-                console.log('S3 Upload complete for ' + this.file.name + ' : ' + this.storageId);
-                if (directUploadReport) {
-                        getMD5(this.file, prog => {
-                                var current = 1 + prog;
-                                $('[upid="' + this.id + '"] progress').attr({
-                                        value: current,
-                                        max: 2
-                                });
-                        }).then(md5 => {
-                                this.handleDirectUpload(md5);
-                        }, err => console.error(err));
-                }
-                else {
-                        console.log("Abandoned: " + this.storageId);
-                }
-        }
-        async cancelMPUpload() {
-               $.ajax({
-                        url: this.urls.abort,
-                        type: 'DELETE',
-                        context:this,
-                        cache: false,
-                        processData: false,
-                        success: function() {
-                          console.log('Successfully cancelled upload of ' + this.file.name);
-                        },
-                        error: function(jqXHR, textStatus, errorThrown) {
-                                console.log('Failure: ' + jqXHR.status);
-                                console.log('Failure: ' + errorThrown);
-
-                        }
-            });
-         }
-        async finishMPUpload() {
-          var eTagsObject={};
-          for(var i=1;i<=this.numEtags;i++) {
-             eTagsObject[i]=this.etags[i];
-          }
-               $.ajax({
-                        url: this.urls.complete,
-                        type: 'PUT',
-                        context:this,
-                        data: JSON.stringify(eTagsObject),
-                        cache: false,
-                        processData: false,
-                        success: function() {
-                          console.log('Successfully completed upload of ' + this.file.name);
-                          this.reportUpload();
-                        },
-                        error: function(jqXHR, textStatus, errorThrown) {
-                                console.log('Failure: ' + jqXHR.status);
-                                console.log('Failure: ' + errorThrown);
-
-                        }
-            });        }
-
-        async handleDirectUpload(md5) {
-                this.state = UploadState.HASHED;
-                //Wait for each call to finish and update the DOM
-                while (inDataverseCall === true) {
-                        await sleep(delay);
-                }
-                inDataverseCall = true;
-                //storageId is not the location - has a : separator and no path elements from dataset
-                //(String uploadComponentId, String fullStorageIdentifier, String fileName, String contentType, String checksumType, String checksumValue)
-                handleExternalUpload([{ name: 'uploadComponentId', value: 'datasetForm:fileUpload' }, { name: 'fullStorageIdentifier', value: this.storageId }, { name: 'fileName', value: this.file.name }, { name: 'contentType', value: this.file.type }, { name: 'checksumType', value: 'MD5' }, { name: 'checksumValue', value: md5 }]);
-        }
-}
-
-function queueFileForDirectUpload(file) {
-        if (fileList.length === 0) { uploadWidgetDropRemoveMsg(); }
-        var fUpload = new fileUpload(file);
-        fileList.push(fUpload);
-        //Fire off the first 4 to start (0,1,2,3)
-        if (filesInProgress < 4) {
-                filesInProgress = filesInProgress + 1;
-                fUpload.startRequestForDirectUploadUrl();
-        }
-}
-
-async function uploadFileDirectly(urls, storageId, filesize) {
-        await sleep(delay);
-        inDataverseCall = false;
-
-        if (directUploadEnabled) {
-                var upload = null;
-
-                //As long as we have the right file size, we're OK
-                for (i = 0; i < fileList.length; i++) {
-                        if (fileList[i].file.size == filesize) {
-                                upload = fileList.splice(i,1)[0];
-                                break;
-                        }
-                }
-                upload.urls = JSON.parse(urls);
-                upload.storageId = storageId;
-
-                //Increment count of files being processed
-                curFile = curFile + 1;
-                console.log('Uploading ' + upload.file.name + ' as ' + storageId + ' to ' + urls);
-
-                upload.doUpload();
-        }
-}
-
-
-
-
-function removeErrors() {
-        var errors = document.getElementsByClassName("ui-fileupload-error");
-        for (i = errors.length - 1; i >= 0; i--) {
-                errors[i].parentNode.removeChild(errors[i]);
-        }
-}
-
-var observer = null;
-
-function uploadStarted() {
-        // If this is not the first upload, remove error messages since
-        // the upload of any files that failed will be tried again.
-        removeErrors();
-        var curId = 0;
-        //Find the upload table body
-        var files = $('.ui-fileupload-files .ui-fileupload-row');
-        //Add an id attribute to each entry so we can later match errors with the right entry
-        for (i = 0; i < files.length; i++) {
-                files[i].setAttribute('upid', curId);
-                curId = curId + 1;
-        }
-        //Setup an observer to watch for additional rows being added
-        var config = { childList: true };
-        var callback = function(mutations) {
-                //Add an id attribute to all new entries
-                mutations.forEach(function(mutation) {
-                        for (i = 0; i < mutation.addedNodes.length; i++) {
-                                mutation.addedNodes[i].setAttribute('upid', curId);
-                                curId = curId + 1;
-                        }
-                        //Remove existing error messages since adding a new entry appears to cause a retry on previous entries
-                        removeErrors();
-                });
-        };
-        //uploadStarted appears to be called only once, but, if not, we should stop any current observer
-        if (observer != null) {
-                observer.disconnect();
-        }
-        observer = new MutationObserver(callback);
-        observer.observe(files[0].parentElement, config);
-}
-
-function uploadFinished(fileupload) {
-        if (fileupload.files.length === 0) {
-                $('button[id$="AllUploadsFinished"]').trigger('click');
-                //stop observer when we're done
-                if (observer != null) {
-                        observer.disconnect();
-                        observer = null;
-                }
-        }
-}
-
-async function directUploadFinished() {
-
-        numDone = finishFile();
-        var total = curFile;
-        var inProgress = filesInProgress;
-        var inList = fileList.length;
-        console.log(inList + ' : ' + numDone + ' : ' + total + ' : ' + inProgress);
-        if (directUploadEnabled) {
-                if (inList === 0) {
-                        if (total === numDone) {
-                                $('button[id$="AllUploadsFinished"]').trigger('click');
-                                //stop observer when we're done
-                                if (observer != null) {
-                                        observer.disconnect();
-                                        observer = null;
-                                }
-                        }
-                } else {
-                        if ((inProgress < 4) && (inProgress < inList)) {
-                                filesInProgress = filesInProgress + 1;
-                                for (i = 0; i < fileList.length; i++) {
-                                  if(fileList[i].state==UploadState.QUEUED) {
-                                    fileList[i].startRequestForDirectUploadUrl();
-                                    break;
-                                  }
-                                 }
-                        }
-                }
-        }
-        await sleep(delay);
-
-        inDataverseCall = false;
-}
-
-async function uploadFailure(jqXHR, upid, filename) {
-        // This handles HTTP errors (non-20x reponses) such as 0 (no connection at all), 413 (Request too large),
-        // and 504 (Gateway timeout) where the upload call to the server fails (the server doesn't receive the request)
-        // It notifies the user and provides info about the error (status, statusText)
-        // On some browsers, the status is available in an event: window.event.srcElement.status
-        // but others, (Firefox) don't support this. The calls below retrieve the status and other info
-        // from the call stack instead (arguments to the fail() method that calls onerror() that calls this function
-
-        if (directUploadEnabled) {
-                await sleep(delay);
-        }
-        inDataverseCall = false;
-
-        //Retrieve the error number (status) and related explanation (statusText)
-        var status = 0;
-        var statusText = null;
-
-        // There are various metadata available about which file the error pertains to
-        // including the name and size.
-        // However, since the table rows created by PrimeFaces only show name and approximate size,
-        // these may not uniquely identify the affected file. Therefore, we set a unique upid attribute
-        // in uploadStarted (and the MutationObserver there) and look for that here. The files array has
-        // only one element and that element includes a description of the row involved, including it's upid.
-
-        var name = null;
-        var id = null;
-        if(jqXHR=== null) {
-          status=1;  //made up
-          statusText='Aborting';
-        } else if ((typeof jqXHR !== 'undefined')) {
-                status = jqXHR.status;
-                statusText = jqXHR.statusText;
-                id = upid;
-                name = filename;
-        } else {
-                try {
-                        name = arguments.callee.caller.caller.arguments[1].files[0].name;
-                        id = arguments.callee.caller.caller.arguments[1].files[0].row[0].attributes.upid.value;
-                        status = arguments.callee.caller.caller.arguments[1].jqXHR.status;
-                        statusText = arguments.callee.caller.caller.arguments[1].jqXHR.statusText;
-                } catch(err) {
-                        console.log("Unable to determine status for error - assuming network issue");
-                        console.log("Exception: " + err.message);
-                }
-        }
-
-        //statusText for error 0 is the unhelpful 'error'
-        if (status == 0) statusText = 'Network Error';
-
-        //Log the error
-        console.log('Upload error:' + name + ' upid=' + id + ', Error ' + status + ': ' + statusText);
-        //Find the table
-        var rows = $('.ui-fileupload-files .ui-fileupload-row');
-        //Create an error element
-        var node = document.createElement("TD");
-        //Add a class to make finding these errors easy
-        node.classList.add('ui-fileupload-error');
-        //Add the standard error message class for formatting purposes
-        node.classList.add('ui-message-error');
-        var textnode = document.createTextNode("Upload unsuccessful (" + status + ": " + statusText + ").");
-        node.appendChild(textnode);
-        //Add the error message to the correct row
-        for (i = 0; i < rows.length; i++) {
-                if (rows[i].getAttribute('upid') == id) {
-                        //Remove any existing error message/only show last error (have seen two error 0 from one network disconnect)
-                        var err = rows[i].getElementsByClassName('ui-fileupload-error');
-                        if (err.length != 0) {
-                                err[0].remove();
-                        }
-                        rows[i].appendChild(node);
-                        break;
-                }
-        }
-        if (directUploadEnabled) {
-                //Mark this file as processed and keep processing further files
-                directUploadFinished();
-        }
-}
-//MD5 Hashing functions
-
-function readChunked(file, chunkCallback, endCallback) {
-        var fileSize = file.size;
-        var chunkSize = 64 * 1024 * 1024; // 64MB
-        var offset = 0;
-
-        var reader = new FileReader();
-        reader.onload = function() {
-                if (reader.error) {
-                        endCallback(reader.error || {});
-                        return;
-                }
-                offset += reader.result.length;
-                // callback for handling read chunk
-                // TODO: handle errors
-                chunkCallback(reader.result, offset, fileSize);
-                if (offset >= fileSize) {
-                        endCallback(null);
-                        return;
-                }
-                readNext();
-        };
-
-        reader.onerror = function(err) {
-                endCallback(err || {});
-        };
-
-        function readNext() {
-                var fileSlice = file.slice(offset, offset + chunkSize);
-                reader.readAsBinaryString(fileSlice);
-        }
-        readNext();
-}
-
-function getMD5(blob, cbProgress) {
-        return new Promise((resolve, reject) => {
-                var md5 = CryptoJS.algo.MD5.create();
-                readChunked(blob, (chunk, offs, total) => {
-                        md5.update(CryptoJS.enc.Latin1.parse(chunk));
-                        if (cbProgress) {
-                                cbProgress(offs / total);
-                        }
-                }, err => {
-                        if (err) {
-                                reject(err);
-                        } else {
-                                // TODO: Handle errors
-                                var hash = md5.finalize();
-                                var hashHex = hash.toString(CryptoJS.enc.Hex);
-                                resolve(hashHex);
-                        }
-                });
-        });
+var fileList = [];
+var observer2 = null;
+var numDone = 0;
+var delay = 100; //milliseconds
+var UploadState = {
+        QUEUED: 'queued',
+        REQUESTING: 'requesting',
+        UPLOADING: 'uploading',
+        UPLOADED: 'uploaded',
+        HASHED: 'hashed',
+        FINISHED: 'finished',
+        FAILED: 'failed'
+};
+
+//true indicates direct upload is in use; cancel may set it back to false, at which point direct upload functions should stop doing further work
+var directUploadEnabled = false;
+
+var directUploadReport = true;
+
+var checksumAlgName;
+
+//How many files have started being processed but aren't yet being uploaded
+var filesInProgress = 0;
+//The # of the current file being processed (total number of files for which upload has at least started)
+var curFile = 0;
+//The number of upload ids that have been assigned in the files table
+var getUpId = (function() {
+        var counter = -1;
+    return function () {
+        counter += 1;
+        return counter;
+    };
+})();
+//How many files are completely done
+var finishFile = (function() {
+        var counter = 0;
+    return function () {
+        counter += 1;
+        return counter;
+    };
+})();
+
+
+function setupDirectUpload(enabled) {
+    if (enabled) {
+        directUploadEnabled = true;
+        //An indicator of which version of this code is in use - keep this updated.
+        console.log('Dataverse Direct Upload for v5.0');
+        $('.ui-fileupload-upload').hide();
+        $('.ui-fileupload-cancel').hide();
+
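+        //Ask Dataverse which checksum (fixity) algorithm to use for direct uploads; fall back to MD5 if the call fails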
+        fetch("api/files/fixityAlgorithm")
+            .then((response) => {
+                if (!response.ok) {
+                    console.log("Did not get fixityAlgorithm from Dataverse, using MD5");
+                    return null;
+                } else {
+                    return response.json();
+                }
+            }).then(checksumAlgJson => {
+                checksumAlgName = "MD5";
+                if (checksumAlgJson != null) {
+                    checksumAlgName = checksumAlgJson.data.message;
+                }
+            })
+            .then(() => {
+                //Catch files entered via upload dialog box. Since this 'select' widget is replaced by PF, we need to add a listener again when it is replaced
+                var fileInput = document.getElementById('datasetForm:fileUpload_input');
+                if (fileInput !== null) {
+                    fileInput.addEventListener('change', function(event) {
+                        fileList = [];
+                        for (var i = 0; i < fileInput.files.length; i++) {
+                            queueFileForDirectUpload(fileInput.files[i]);
+                        }
+                    }, { once: false });
+                }
+                //Add support for drag and drop. Since the fileUploadForm is not replaced by PF, catching changes with a mutationobserver isn't needed
+                var fileDropWidget = document.getElementById('datasetForm:fileUpload');
+                fileDropWidget.addEventListener('drop', function(event) {
+                    fileList = [];
+                    for (var i = 0; i < event.dataTransfer.files.length; i++) {
+                        queueFileForDirectUpload(event.dataTransfer.files[i]);
+                    }
+                }, { once: false });
+
+                var config = { childList: true };
+                var callback = function(mutations) {
+                    mutations.forEach(function(mutation) {
+                        for (i = 0; i < mutation.addedNodes.length; i++) {
+                            //Add a listener on any replacement file 'select' widget
+                            if (mutation.addedNodes[i].id === 'datasetForm:fileUpload_input') {
+                                fileInput = mutation.addedNodes[i];
+                                mutation.addedNodes[i].addEventListener('change', function(event) {
+                                    for (var j = 0; j < mutation.addedNodes[i].files.length; j++) {
+                                        queueFileForDirectUpload(mutation.addedNodes[i].files[j]);
+                                    }
+                                }, { once: false });
+                            }
+                        }
+                    });
+                };
+                if (observer2 !== null) {
+                    observer2.disconnect();
+                }
+                observer2 = new MutationObserver(callback);
+                observer2.observe(document.getElementById('datasetForm:fileUpload'), config);
+            });
+    }//else ?
+}
+
+function sleep(ms) {
+        return new Promise(resolve => setTimeout(resolve, ms));
+}
+
+async function cancelDatasetCreate() {
+        //Page is going away - don't upload any more files, finish reporting current uploads, and then call cancelCreateCommand to clean up temp files
+        if (directUploadEnabled) {
+                fileList = [];
+                directUploadEnabled = false;
+                directUploadReport = false;
+                while (curFile !== numDone) {
+                        $("#cancelCreate").prop('onclick', null).text("Cancel In Progress...").prop('disabled', true);
+                        $("#datasetForm\\:save").prop('disabled', true);
+                        await sleep(1000);
+                }
+                cancelCreateCommand();
+        } else {
+                cancelCreateCommand();
+        }
+}
+
+
+async function cancelDatasetEdit() {
+        //Don't upload any more files and don't send any more file entries to Dataverse; report any direct upload files that didn't get handled
+        if (directUploadEnabled) {
+                fileList = [];
+                directUploadEnabled = false;
+                directUploadReport = false;
+                while (curFile !== numDone) {
+                        $("#doneFilesButtonnop").prop('onclick', null).text("Cancel In Progress...").prop('disabled', true);
+                        await sleep(1000);
+                }
+                cancelEditCommand();
+        } else {
+                cancelEditCommand();
+        }
+}
+
+
+var inDataverseCall = false;
+
+var fileUpload = class fileUploadClass {
+        constructor(file) {
+                this.file = file;
+                this.state = UploadState.QUEUED;
+        }
+        async startRequestForDirectUploadUrl() {
+                this.state=UploadState.REQUESTING;
+                //Wait for each call to finish and update the DOM
+                while (inDataverseCall === true) {
+                        await sleep(delay);
+                }
+                inDataverseCall = true;
+                //Request presigned direct-upload URL(s) for a file of this size (the response is delivered back via uploadFileDirectly)
+                requestDirectUploadUrls([{ name: 'fileSize', value: this.file.size }]);
+        }
+
+        async doUpload() {
+                this.state = UploadState.UPLOADING;
+                var thisFile = curFile-1;
+                this.id=thisFile;
+                //This appears to be the earliest point when the file table has been populated, and, since we don't know how many table entries have had ids added already, we check
+                var filerows = $('.ui-fileupload-files .ui-fileupload-row');
+                //Add an id attribute to each entry so we can later match progress and errors with the right entry
+                for (i = 0; i < filerows.length; i++) {
+                        var upid = filerows[i].getAttribute('upid');
+                        if (typeof upid === "undefined" || upid === null || upid === '') {
+                                var newUpId = getUpId();
+                                filerows[i].setAttribute('upid', newUpId);
+                        }
+                }
+                //Get the list of files to upload
+                var files = $('.ui-fileupload-files');
+                //Find the corresponding row (assumes that the file order and the order of rows is the same)
+                var fileNode = files.find("[upid='" + thisFile + "']");
+                //Decrement number queued for processing
+                filesInProgress = filesInProgress - 1;
+                var progBar = fileNode.find('.ui-fileupload-progress');
+                var cancelButton = fileNode.find('.ui-fileupload-cancel');
+                 var cancelled=false;
+                  $(cancelButton).click(function() {cancelled=true});
+                progBar.html('');
+                progBar.append($('<progress/>').attr('class', 'ui-progressbar ui-widget ui-widget-content ui-corner-all'));
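+                //If the response contains a single presigned 'url', PUT the whole file in one request; otherwise 'urls' maps part numbers to presigned URLs for an S3 multipart upload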
+                if(this.urls.hasOwnProperty("url")) {
+                $.ajax({
+                        url: this.urls.url,
+                        headers: { "x-amz-tagging": "dv-state=temp" },
+                        type: 'PUT',
+                        data: this.file,
+                        context:this,
+                        cache: false,
+                        processData: false,
+                        success: function() {
+                                //ToDo - cancelling abandons the file. It is marked as temp so it can be cleaned up later, but it would be good to remove it now (requires either sending a presigned delete URL or adding a callback to delete only a temp file).
+                                if(!cancelled) {
+                                    this.reportUpload();
+                                }
+                        },
+                        error: function(jqXHR, textStatus, errorThrown) {
+                                console.log('Failure: ' + jqXHR.status);
+                                console.log('Failure: ' + errorThrown);
+                                uploadFailure(jqXHR, thisFile);
+                        },
+                        xhr: function() {
+                                var myXhr = $.ajaxSettings.xhr();
+                                if (myXhr.upload) {
+                                        myXhr.upload.addEventListener('progress', function(e) {
+                                                if (e.lengthComputable) {
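+                                                        //Scale the max so the PUT fills only the first half of the progress bar; the checksum pass in reportUpload fills the second half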
+                                                        var doublelength = 2 * e.total;
+                                                        progBar.children('progress').attr({
+                                                                value: e.loaded,
+                                                                max: doublelength
+                                                        });
+                                                }
+                                        });
+                                }
+                                return myXhr;
+                        }
+                });
+                } else {
+                  var loaded=[];
+                  this.etags=[];
+                  this.numEtags=0;
+                  var doublelength = 2* this.file.size;
+                  var partSize= this.urls.partSize;
+                  var started=0;
+                  console.log('Num parts: ' + Object.keys(this.urls.urls).length);
+                  loaded[thisFile]=[];
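+                  //Each entry maps a 1-based part number to its presigned URL; upload the parts (at most 10 in flight) and collect their ETags for the completion call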
+                  for (const [key, value] of Object.entries(this.urls.urls)) {
+                    if(!directUploadEnabled || cancelled) {
+                      //Direct upload has been cancelled - quit uploading new parts and abort this mp upload
+                      //once the parts in progress are done
+                      while((started-this.numEtags)>0) {
+                        await sleep(delay);
+                      }
+                      this.cancelMPUpload();
+                      directUploadFinished();
+                      break;
+                    }
+                    started=started+1;
+                    //Don't queue more than 10 parts at a time
+                    while((started-this.numEtags)>10) {
+                      await sleep(delay);
+                    }
+                    if (typeof this.etags[key] === 'undefined' || this.etags[key] == -1) {
+                       this.etags[key]=-1;
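+                       //Slice this part's byte range out of the file (the last part may be shorter than partSize)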
+                       var size = Math.min(partSize, this.file.size-(key-1)*partSize);
+                       var offset=(key-1)*partSize;
+                       var blob=this.file.slice(offset, offset + size);
+                       $.ajax({
+                        url: value,
+  //                      headers: { "x-amz-tagging": "dv-state=temp" },
+                        type: 'PUT',
+                        data: blob,
+                        context:this,
+                        cache: false,
+                        processData: false,
+                        success: function(data, status, response) {
+                                console.log('Successful upload of part ' + key + ' of ' + Object.keys(this.urls.urls).length);
+                                //The header has quotes around the eTag
+                                this.etags[key]=response.getResponseHeader('ETag').replace(/["]+/g, '');
+                                this.numEtags = this.numEtags+1;
+                            if (this.numEtags === Object.keys(this.urls.urls).length) {
+                                  this.multipartComplete();
+                                }
+                        },
+                        error: function(jqXHR, textStatus, errorThrown) {
+                                console.log('Failure: ' + jqXHR.status);
+                                console.log('Failure: ' + errorThrown);
+                                console.log(thisFile + ' : part' + key);
+                                this.numEtags = this.numEtags+1;
+                                this.etags[key]=-1;
+                            if (this.numEtags === Object.keys(this.urls.urls).length) {
+                                  this.multipartComplete();
+                                }
+                        },
+                        xhr: function() {
+                                var myXhr = $.ajaxSettings.xhr();
+                                if (myXhr.upload) {
+                                        myXhr.upload.addEventListener('progress', function(e) {
+                                                if (e.lengthComputable) {
+                                                        loaded[thisFile][key-1]=e.loaded;
+                                                        var total=0;
+                                                        for(let val of loaded[thisFile].values()) {
+                                                          //if parts with lower keys haven't reported yet, there could be undefined values in the array = skip those
+                                                          if(typeof val !== 'undefined') {
+                                                            total = total+val;
+                                                          }
+                                                        }
+                                                        progBar.children('progress').attr({
+                                                                value: total ,
+                                                                max: doublelength
+                                                        });
+                                                }
+                                        });
+                                }
+                                return myXhr;
+                        }
+                });
+              }
+            }
+          }
+        }
+
+        //All of the multipart part uploads have succeeded or failed. Here we decide whether to finish, retry, or cancel/abort 
+        multipartComplete() {
+          console.log('reporting file ' + this.file.name);
+          var allGood=true;
+          //Safety check - verify that all eTags were set
+          for (let val of this.etags.values()) {
+            if (val == -1) {
+              allGood=false;
+              break;
+            }
+          }
+          if(!allGood) {
+            if(this.alreadyRetried) {
+              console.log('Error after retrying ' + this.file.name);
+              //No jqXHR is available here (the individual part requests have already returned), so report a generic failure for this file
+              uploadFailure(null, this.id, this.file.name);
+              this.cancelMPUpload();
+            } else {
+              this.alreadyRetried=true;
+              this.doUpload();
+            }
+          } else {
+            this.finishMPUpload();
+          }
+        }
+        
+        reportUpload() {
+                this.state = UploadState.UPLOADED;
+                console.log('S3 Upload complete for ' + this.file.name + ' : ' + this.storageId);
+                if (directUploadReport) {
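+                        // Checksum progress (0..1) maps onto the second half of the progress bar: value runs from 1 to 2 with max 2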
+                        getChecksum(this.file, prog => {
+                                var current = 1 + prog;
+                                $('[upid="' + this.id + '"] progress').attr({
+                                        value: current,
+                                        max: 2
+                                });
+                        }).then(checksum => {
+                                this.handleDirectUpload(checksum);
+                        }).catch(err => console.error(err));
+                }
+                else {
+                        console.log("Abandoned: " + this.storageId);
+                }
+        }
+        async cancelMPUpload() {
+               $.ajax({
+                        url: this.urls.abort,
+                        type: 'DELETE',
+                        context:this,
+                        cache: false,
+                        processData: false,
+                        success: function() {
+                          console.log('Successfully cancelled upload of ' + this.file.name);
+                        },
+                        error: function(jqXHR, textStatus, errorThrown) {
+                                console.log('Failure: ' + jqXHR.status);
+                                console.log('Failure: ' + errorThrown);
+
+                        }
+            });
+         }
+        async finishMPUpload() {
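+          // Build a part-number -> ETag map and send it as JSON to the 'complete' URL; the endpoint behind it presumably finishes the S3 multipart upload using these ETags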
+          var eTagsObject={};
+          for(var i=1;i<=this.numEtags;i++) {
+             eTagsObject[i]=this.etags[i];
+          }
+               $.ajax({
+                        url: this.urls.complete,
+                        type: 'PUT',
+                        context:this,
+                        data: JSON.stringify(eTagsObject),
+                        cache: false,
+                        processData: false,
+                        success: function() {
+                          console.log('Successfully completed upload of ' + this.file.name);
+                          this.reportUpload();
+                        },
+                        error: function(jqXHR, textStatus, errorThrown) {
+                                console.log('Failure: ' + jqXHR.status);
+                                console.log('Failure: ' + errorThrown);
+
+                        }
+            });
+        }
+
+        async handleDirectUpload(checksum) {
+                this.state = UploadState.HASHED;
+                //Wait for each call to finish and update the DOM
+                while (inDataverseCall === true) {
+                        await sleep(delay);
+                }
+                inDataverseCall = true;
+                //storageId is not the location - has a : separator and no path elements from dataset
+                //(String uploadComponentId, String fullStorageIdentifier, String fileName, String contentType, String checksumType, String checksumValue)
+                handleExternalUpload([{ name: 'uploadComponentId', value: 'datasetForm:fileUpload' }, { name: 'fullStorageIdentifier', value: this.storageId }, { name: 'fileName', value: this.file.name }, { name: 'contentType', value: this.file.type }, { name: 'checksumType', value: checksumAlgName }, { name: 'checksumValue', value: checksum }]);
+        }
+}
+
+function queueFileForDirectUpload(file) {
+        if (fileList.length === 0) { uploadWidgetDropRemoveMsg(); }
+        var fUpload = new fileUpload(file);
+        fileList.push(fUpload);
+        //Fire off the first 4 to start (0,1,2,3)
+        if (filesInProgress < 4) {
+                filesInProgress = filesInProgress + 1;
+                fUpload.startRequestForDirectUploadUrl();
+        }
+}
+
+async function uploadFileDirectly(urls, storageId, filesize) {
+        await sleep(delay);
+        inDataverseCall = false;
+
+        if (directUploadEnabled) {
+                var upload = null;
+
+                //Match the queued entry by file size - as long as only one queued file has this size, we pick the right one
+                for (i = 0; i < fileList.length; i++) {
+                    if (fileList[i].file.size == filesize) {
+                        upload = fileList.splice(i,1)[0];
+                        break;
+                    }
+                }
+                upload.urls = JSON.parse(urls);
+                upload.storageId = storageId;
+
+                //Increment count of files being processed
+                curFile = curFile + 1;
+                console.log('Uploading ' + upload.file.name + ' as ' + storageId + ' to ' + urls);
+
+                upload.doUpload();
+        }
+}
+
+
+
+
+function removeErrors() {
+        var errors = document.getElementsByClassName("ui-fileupload-error");
+        for (i = errors.length - 1; i >= 0; i--) {
+                errors[i].parentNode.removeChild(errors[i]);
+        }
+}
+
+var observer = null;
+
+// uploadStarted and uploadFinished are not related to direct upload.
+// They deal with clearing old errors and watching for new ones and then signaling when all uploads are done
+function uploadStarted() {
+        // If this is not the first upload, remove error messages since
+        // the upload of any files that failed will be tried again.
+        removeErrors();
+        var curId = 0;
+        //Find the upload table body
+        var files = $('.ui-fileupload-files .ui-fileupload-row');
+        //Add an id attribute to each entry so we can later match errors with the right entry
+        for (i = 0; i < files.length; i++) {
+                files[i].setAttribute('upid', curId);
+                curId = curId + 1;
+        }
+        //Setup an observer to watch for additional rows being added
+        var config = { childList: true };
+        var callback = function(mutations) {
+                //Add an id attribute to all new entries
+                mutations.forEach(function(mutation) {
+                        for (i = 0; i < mutation.addedNodes.length; i++) {
+                                mutation.addedNodes[i].setAttribute('upid', curId);
+                                curId = curId + 1;
+                        }
+                        //Remove existing error messages since adding a new entry appears to cause a retry on previous entries
+                        removeErrors();
+                });
+        };
+        //uploadStarted appears to be called only once; if not, stop any existing observer before creating a new one
+    if (observer !== null) {
+                observer.disconnect();
+        }
+        observer = new MutationObserver(callback);
+        observer.observe(files[0].parentElement, config);
+}
+
+function uploadFinished(fileupload) {
+        if (fileupload.files.length === 0) {
+                $('button[id$="AllUploadsFinished"]').trigger('click');
+                //stop observer when we're done
+        if (observer !== null) {
+                        observer.disconnect();
+                        observer = null;
+                }
+        }
+}
+
+async function directUploadFinished() {
+
+        numDone = finishFile();
+        var total = curFile;
+        var inProgress = filesInProgress;
+        var inList = fileList.length;
+        console.log(inList + ' : ' + numDone + ' : ' + total + ' : ' + inProgress);
+        if (directUploadEnabled) {
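+                // If the queue is empty and every started file has reported, signal PrimeFaces that all uploads are done; otherwise start the next queued upload, keeping at most 4 in flight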
+                if (inList === 0) {
+                        if (total === numDone) {
+                                $('button[id$="AllUploadsFinished"]').trigger('click');
+                                //stop observer when we're done
+                if (observer !== null) {
+                                        observer.disconnect();
+                                        observer = null;
+                                }
+                        }
+                } else {
+                        if ((inProgress < 4) && (inProgress < inList)) {
+                                filesInProgress = filesInProgress + 1;
+                                for (i = 0; i < fileList.length; i++) {
+                    if (fileList[i].state === UploadState.QUEUED) {
+                                    fileList[i].startRequestForDirectUploadUrl();
+                                    break;
+                                  }
+                                 }
+                        }
+                }
+        }
+        await sleep(delay);
+
+        inDataverseCall = false;
+}
+
+async function uploadFailure(jqXHR, upid, filename) {
+        // This handles HTTP errors (non-2xx responses) such as 0 (no connection at all), 413 (Request Entity Too Large),
+        // and 504 (Gateway Timeout), where the upload call to the server fails (the server doesn't receive the request).
+        // It notifies the user and provides info about the error (status, statusText).
+        // On some browsers, the status is available in an event: window.event.srcElement.status,
+        // but others (Firefox) don't support this. The calls below retrieve the status and other info
+        // from the call stack instead (arguments to the fail() method that calls onerror(), which calls this function).
+
+        if (directUploadEnabled) {
+                await sleep(delay);
+        }
+        inDataverseCall = false;
+
+        //Retrieve the error number (status) and related explanation (statusText)
+        var status = 0;
+        var statusText = null;
+
+        // Various metadata are available about which file the error pertains to,
+        // including the name and size.
+        // However, since the table rows created by PrimeFaces only show the name and approximate size,
+        // these may not uniquely identify the affected file. Therefore, we set a unique upid attribute
+        // in uploadStarted (and in the MutationObserver there) and look for that here. The files array has
+        // only one element, and that element includes a description of the row involved, including its upid.
+
+        var name = null;
+        var id = null;
+        if (jqXHR === null) {
+          status = 1;  //made-up status used to indicate a client-side abort
+          statusText = 'Aborting';
+        } else if ((typeof jqXHR !== 'undefined')) {
+                status = jqXHR.status;
+                statusText = jqXHR.statusText;
+                id = upid;
+                name = filename;
+        } else {
+                try {
+                        name = arguments.callee.caller.caller.arguments[1].files[0].name;
+                        id = arguments.callee.caller.caller.arguments[1].files[0].row[0].attributes.upid.value;
+                        status = arguments.callee.caller.caller.arguments[1].jqXHR.status;
+                        statusText = arguments.callee.caller.caller.arguments[1].jqXHR.statusText;
+                } catch(err) {
+                        console.log("Unable to determine status for error - assuming network issue");
+                        console.log("Exception: " + err.message);
+                }
+        }
+
+        //statusText for error 0 is the unhelpful 'error'
+        if (status == 0) {
+                statusText = 'Network Error';
+        }
+        //Log the error
+        console.log('Upload error:' + name + ' upid=' + id + ', Error ' + status + ': ' + statusText);
+        //Find the table
+        var rows = $('.ui-fileupload-files .ui-fileupload-row');
+        //Create an error element
+        var node = document.createElement("TD");
+        //Add a class to make finding these errors easy
+        node.classList.add('ui-fileupload-error');
+        //Add the standard error message class for formatting purposes
+        node.classList.add('ui-message-error');
+        var textnode = document.createTextNode("Upload unsuccessful (" + status + ": " + statusText + ").");
+        node.appendChild(textnode);
+        //Add the error message to the correct row
+        for (i = 0; i < rows.length; i++) {
+        if (rows[i].getAttribute('upid') === id) {
+            //Remove any existing error message/only show last error (have seen two error 0 from one network disconnect)
+            var err = rows[i].getElementsByClassName('ui-fileupload-error');
+            if (err.length !== 0) {
+                err[0].remove();
+            }
+                rows[i].appendChild(node);
+                break;
+            }
+        }
+        if (directUploadEnabled) {
+                //Mark this file as processed and keep processing further files
+                directUploadFinished();
+        }
+}
+//MD5 Hashing functions
+
+function readChunked(file, chunkCallback, endCallback) {
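+        // Read the file sequentially in fixed-size slices with FileReader so large files can be hashed without loading them entirely into memory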
+        var fileSize = file.size;
+        var chunkSize = 64 * 1024 * 1024; // 64MB
+        var offset = 0;
+
+        var reader = new FileReader();
+        reader.onload = function() {
+                if (reader.error) {
+                        endCallback(reader.error || {});
+                        return;
+                }
+                offset += reader.result.length;
+                // callback for handling read chunk
+                // TODO: handle errors
+                chunkCallback(reader.result, offset, fileSize);
+                if (offset >= fileSize) {
+                        endCallback(null);
+                        return;
+                }
+                readNext();
+        };
+
+        reader.onerror = function(err) {
+                endCallback(err || {});
+        };
+
+        function readNext() {
+                var fileSlice = file.slice(offset, offset + chunkSize);
+                reader.readAsBinaryString(fileSlice);
+        }
+        readNext();
+}
+function getChecksum(blob, cbProgress) {
+        return new Promise((resolve, reject) => {
+
+
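+                // Pick a progressive CryptoJS hasher matching the configured checksumAlgName; it is fed chunk by chunk below and finalized into a hex digest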
+                var checksumAlg;
+                switch (checksumAlgName) {
+                    case 'MD5':
+                        checksumAlg = CryptoJS.algo.MD5.create();
+                        break;
+                    case 'SHA-1':
+                        checksumAlg = CryptoJS.algo.SHA1.create();
+                        break;
+                    case 'SHA-256':
+                        checksumAlg = CryptoJS.algo.SHA256.create();
+                        break;
+                    case 'SHA-512':
+                        checksumAlg = CryptoJS.algo.SHA512.create();
+                        break;
+                    default:
+                        console.log(checksumAlgName + ' is not supported, using MD5 as the checksum algorithm');
+                        checksumAlg = CryptoJS.algo.MD5.create();
+                }
+                readChunked(blob, (chunk, offs, total) => {
+                        checksumAlg.update(CryptoJS.enc.Latin1.parse(chunk));
+                        if (cbProgress) {
+                                cbProgress(offs / total);
+                        }
+                }, err => {
+                        if (err) {
+                                reject(err);
+                        } else {
+                                // TODO: Handle errors
+                                var hash = checksumAlg.finalize();
+                                var hashHex = hash.toString(CryptoJS.enc.Hex);
+                                resolve(hashHex);
+                        }
+                });
+        });
 }
\ No newline at end of file
diff --git a/src/main/webapp/resources/js/mydata.js b/src/main/webapp/resources/js/mydata.js
index 182df7cc62f..899ba6637e2 100644
--- a/src/main/webapp/resources/js/mydata.js
+++ b/src/main/webapp/resources/js/mydata.js
@@ -27,6 +27,7 @@ function bind_checkbox_labels(){
     bind_checkbox_labels_by_names('mydata_dvobject_label', 'div_dvobject_types');
     bind_checkbox_labels_by_names('mydata_pubstate_label', 'div_published_states');
     bind_checkbox_labels_by_names('mydata_role_label', 'div_role_states');
+    bind_checkbox_labels_by_names('mydata_validity_label', 'div_dataset_valid');
 
 }
 
@@ -493,6 +494,12 @@ function submit_my_data_search(){
                     y[i].innerHTML = deaccessioned;
                 }
             }
+            if ($("span.label.incomplete")) {
+                var y = $("span.label.incomplete");
+                for (var i = 0; i < y.length; i++) {
+                    y[i].innerHTML = incomplete;
+                }
+            }
 
 
             // --------------------------------
diff --git a/src/main/webapp/search-include-fragment.xhtml b/src/main/webapp/search-include-fragment.xhtml
index a6e344afb8c..af568170157 100644
--- a/src/main/webapp/search-include-fragment.xhtml
+++ b/src/main/webapp/search-include-fragment.xhtml
@@ -9,7 +9,7 @@
      xmlns:jsf="http://xmlns.jcp.org/jsf">
 
     <c:set var="page" value="/dataverse/#{DataversePage.dataverse.alias}"/>
-    <c:set var="cvocConf" value="#{settingsWrapper.getCVocConf()}"/>
+    <c:set var="cvocConf" value="#{settingsWrapper.getCVocConf(true)}"/>
     <p:fragment>
         <p:autoUpdate/>
         <ui:repeat value="#{DatasetPage.vocabScripts}" var="vocabScriptUrl">
@@ -28,7 +28,7 @@
                         <p:watermark for="searchBasic" value="#{dataverseRedirectPage == 'dataverseuser.xhtml' ? bundle['account.search.input.watermark'] : bundle['dataverse.search.input.watermark']}"/>
 
                         <span class="input-group-btn">
-                            <p:commandLink id="searchbutton" title="#{bundle['dataverse.search.btn.find']}" styleClass="btn btn-default bootstrap-button-tooltip" 
+                            <p:commandLink id="searchbutton" title="#{bundle['dataverse.search.btn.find']}" ariaLabel="#{bundle['dataverse.search.btn.find']}" styleClass="btn btn-default bootstrap-button-tooltip" 
                                            action="#{SearchIncludeFragment.searchRedirect(dataverseRedirectPage, DataversePage.dataverse)}">
                                 <span class="glyphicon glyphicon-search no-text"/>
                             </p:commandLink>
@@ -36,7 +36,7 @@
 
                         <p:remoteCommand name="submitsearch" action="#{SearchIncludeFragment.searchRedirect(dataverseRedirectPage, DataversePage.dataverse)}"/>
                     </div>
-                    <h:outputLink id="advsearchlink" value="#{widgetWrapper.wrapURL('/search/advanced.xhtml')}">
+                    <h:outputLink rel="nofollow" id="advsearchlink" value="#{widgetWrapper.wrapURL('/search/advanced.xhtml')}">
                         <f:param name="dataverseIdentifier" value="#{SearchIncludeFragment.dataverse.alias}"/>
                         <h:outputText value="#{bundle['dataverse.search.advancedSearch']}"/>
                         <!--<f:param name="q" value="#{SearchIncludeFragment.query}" disable="#{empty SearchIncludeFragment.query}"/>-->
@@ -88,15 +88,26 @@
             <pre><h:outputText value="#{SearchIncludeFragment.searchException.cause.message}"/></pre>
         </div>
     </div>
+    
+    
+    <!-- ... And the message block for when SOLR IS DOWN *TEMPORARILY* -->
+    <div class="row" jsf:rendered="#{SearchIncludeFragment.solrTemporarilyUnavailable}">
+        <div class="col-sm-12">
+            <div class="bg-danger padding-12 margin-bottom">
+                <h:outputText value="#{bundle['dataverse.results.solrIsTemporarilyUnavailable']}" styleClass="highlightBold"/>
+            </div>
+            <pre><h:outputText value="#{bundle['dataverse.results.solrIsTemporarilyUnavailable.extraText']}"/></pre>
+        </div>
+    </div>
 
     <!-- Search Results / SOLR IS UP -->
-    <div id="search-row-panel" class="row row-offcanvas row-offcanvas-left" jsf:rendered="#{!SearchIncludeFragment.solrIsDown}">
+    <div id="search-row-panel" class="row row-offcanvas row-offcanvas-left" jsf:rendered="#{!(SearchIncludeFragment.solrIsDown or SearchIncludeFragment.solrTemporarilyUnavailable)}">
         <div id="dv-sidebar" class="col-sm-4 col-md-3 sidebar-offcanvas">
             <!--TYPE FACET (DATAVERSES, DATASETS, FILES)-->
-            <h:form id="facetType">
+            <h:form id="facetType" rendered="#{!SearchIncludeFragment.facetsDisabled}">
                 <!--DATAVERSE TOGGLE-->
                 <div class="clearfix">
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" styleClass="facetTypeChBox facetLink #{SearchIncludeFragment.selectedTypesList.contains('dataverses') ? 'facetSelected': ''}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('dataverses') == null}">
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" styleClass="facetTypeChBox facetLink #{SearchIncludeFragment.selectedTypesList.contains('dataverses') ? 'facetSelected': ''}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('dataverses') == null}">
                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                         <f:param name="types" value="#{SearchIncludeFragment.getNewSelectedTypes('dataverses')}"/>
                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -105,10 +116,11 @@
                         <f:param name="sort" value="#{SearchIncludeFragment.sortField}"/>
                         <f:param name="order" value="#{SearchIncludeFragment.sortOrder}"/>
                         <f:param name="page" value="1"/>
+                        <f:passThroughAttribute name="aria-label" value="#{bundle['facet.collection.label']}"/>
                         <p:selectBooleanCheckbox value="#{SearchIncludeFragment.selectedTypesList.contains('dataverses') ? true : false}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('dataverses') == null ? true : false}" ariaLabel="#{bundle['facet.collection.label']}"/>
                     </h:outputLink>
                     <!--DATAVERSES ONLY-->
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" disabled="#{false}" styleClass="facetTypeLink chkbox-label-margin #{SearchIncludeFragment.selectedTypesList.contains('dataverses') ? 'facetSelected': ''}">
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" disabled="#{false}" styleClass="facetTypeLink chkbox-label-margin #{SearchIncludeFragment.selectedTypesList.contains('dataverses') ? 'facetSelected': ''}">
                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                         <f:param name="types" value="dataverses"/>
                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -127,7 +139,7 @@
                 </div>
                 <!--DATASETS TOGGLE-->
                 <div class="clearfix">
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" styleClass="facetTypeChBox facetLink #{SearchIncludeFragment.selectedTypesList.contains('datasets') ? 'facetSelected': ''}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('datasets') == null}">
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" styleClass="facetTypeChBox facetLink #{SearchIncludeFragment.selectedTypesList.contains('datasets') ? 'facetSelected': ''}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('datasets') == null}">
                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                         <f:param name="types" value="#{SearchIncludeFragment.getNewSelectedTypes('datasets')}"/>
                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -136,10 +148,11 @@
                         <f:param name="sort" value="#{SearchIncludeFragment.sortField}"/>
                         <f:param name="order" value="#{SearchIncludeFragment.sortOrder}"/>
                         <f:param name="page" value="1"/>
+                        <f:passThroughAttribute name="aria-label" value="#{bundle['facet.dataset.label']}"/>
                         <p:selectBooleanCheckbox value="#{SearchIncludeFragment.selectedTypesList.contains('datasets') ? true : false}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('datasets') == null ? true : false}" ariaLabel="#{bundle['facet.dataset.label']}"/>
                     </h:outputLink>
                     <!--DATASETS ONLY-->
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" disabled="#{false}" styleClass="facetTypeLink chkbox-label-margin #{SearchIncludeFragment.selectedTypesList.contains('datasets') ? 'facetSelected': ''}">
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" disabled="#{false}" styleClass="facetTypeLink chkbox-label-margin #{SearchIncludeFragment.selectedTypesList.contains('datasets') ? 'facetSelected': ''}">
                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                         <f:param name="types" value="datasets"/>
                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -158,7 +171,7 @@
                 </div>
                 <!--FILES TOGGLE-->
                 <div class="clearfix">
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" styleClass="facetTypeChBox facetLink #{SearchIncludeFragment.selectedTypesList.contains('files') ? 'facetSelected': ''}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('files') == null}">
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" styleClass="facetTypeChBox facetLink #{SearchIncludeFragment.selectedTypesList.contains('files') ? 'facetSelected': ''}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('files') == null}">
                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                         <f:param name="types" value="#{SearchIncludeFragment.getNewSelectedTypes('files')}"/>
                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -167,10 +180,11 @@
                         <f:param name="sort" value="#{SearchIncludeFragment.sortField}"/>
                         <f:param name="order" value="#{SearchIncludeFragment.sortOrder}"/>
                         <f:param name="page" value="1"/>
+                        <f:passThroughAttribute name="aria-label" value="#{bundle['facet.datafile.label']}"/>
                         <p:selectBooleanCheckbox value="#{SearchIncludeFragment.selectedTypesList.contains('files') ? true : false}" disabled="#{SearchIncludeFragment.getNewSelectedTypes('files') == null ? true : false}" ariaLabel="#{bundle['facet.datafile.label']}"/>
                     </h:outputLink>
                     <!--FILES ONLY-->
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" disabled="#{false}" styleClass="facetTypeLink chkbox-label-margin #{SearchIncludeFragment.selectedTypesList.contains('files') ? 'facetSelected': ''}">
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" disabled="#{false}" styleClass="facetTypeLink chkbox-label-margin #{SearchIncludeFragment.selectedTypesList.contains('files') ? 'facetSelected': ''}">
                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                         <f:param name="types" value="files"/>
                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -190,11 +204,11 @@
             </h:form>
 
             <!--NON-TYPE FACETS-->
-            <h:form id="facetCategoryForm" rendered="#{DataversePage.searchResultsCount > 0}">
+            <h:form id="facetCategoryForm" rendered="#{(DataversePage.searchResultsCount > 0) and !SearchIncludeFragment.facetsDisabled}">
                 <p:dataList id="facetCategoryList" value="#{DataversePage.facetCategoryList}" var="facetCategory">
                     <h:outputText value="#{facetCategory.friendlyName}" styleClass="facetCategoryName"/>
                     <p:dataList value="#{facetCategory.facetLabel}" var="facetLabel" rows="#{DataversePage.getNumberOfFacets(facetCategory.name,5)}">
-                        <h:outputLink value="#{widgetWrapper.wrapURL(page)}" rendered="#{!DataversePage.filterQueries.contains(facetLabel.filterQuery)}" styleClass="facetLink">
+                        <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" rendered="#{!DataversePage.filterQueries.contains(facetLabel.filterQuery)}" styleClass="facetLink">
                             <h:outputText value="#{facetLabel.name}">
                                 <c:if test="#{!cvocConf.isEmpty()}">
                                     <f:passThroughAttribute name="lang" value="#{DatasetPage.getFieldLanguage(cvocConf.get(facetCategory.datasetFieldTypeId).getString('languages'))}" />
@@ -215,7 +229,7 @@
                             <f:param name="order" value="#{DataversePage.sortOrder}"/>
                         </h:outputLink>
 
-                        <h:outputLink value="#{widgetWrapper.wrapURL(page)}" rendered="#{DataversePage.filterQueries.contains(facetLabel.filterQuery)}" styleClass="facetLink facetSelected">
+                        <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" rendered="#{DataversePage.filterQueries.contains(facetLabel.filterQuery)}" styleClass="facetLink facetSelected">
                             <h:outputText value="#{facetLabel.name}">
                                 <c:if test="#{!cvocConf.isEmpty()}">
                                     <f:passThroughAttribute name="lang" value="#{DatasetPage.getFieldLanguage(cvocConf.get(facetCategory.datasetFieldTypeId).getString('languages'))}" />
@@ -248,6 +262,12 @@
                     </ui:fragment>
                 </p:dataList>
             </h:form>
+            <!-- "facets temporarily unavailable" message: -->
+            <h:form id="facetsUnavailable" rendered="#{SearchIncludeFragment.facetsDisabled}">
+                <div class="clearfix">
+                    <h:outputText value="#{bundle['dataverse.results.solrFacetsDisabled']}" styleClass="facetTypeLink"/>
+                </div>
+            </h:form>
         </div>
         <div id="dv-main" class="col-sm-8 col-md-9">
             <!--DEBUG BEGIN-->
@@ -287,7 +307,7 @@
                     </ui:fragment>
                     <ui:fragment rendered="#{SearchIncludeFragment.errorFromSolr != null}">
                         <p>
-                            <a data-toggle="collapse" data-parent="#emptyResults" href="#technicalDetails" aria-expanded="true" aria-controls="technicalDetails">[+] #{bundle['dataverse.results.empty.link.technicalDetails']}</a>
+                            <a rel="nofollow" data-toggle="collapse" data-parent="#emptyResults" href="#technicalDetails" aria-expanded="true" aria-controls="technicalDetails">[+] #{bundle['dataverse.results.empty.link.technicalDetails']}</a>
                         </p>
                         <div id="technicalDetails" class="collapse">
                             <pre><h:outputText value="#{SearchIncludeFragment.errorFromSolr}"/></pre>
@@ -358,7 +378,7 @@
                 <ui:repeat value="#{SearchIncludeFragment.filterQueries}" var="filterQuery" rendered="#{!SearchIncludeFragment.wasSolrErrorEncountered()}">
                     <ui:param name="friendlyNames" value="#{SearchIncludeFragment.getFriendlyNamesFromFilterQuery(filterQuery)}"/>
                     <ui:param name="filterType" value="#{SearchIncludeFragment.getTypeFromFilterQuery(filterQuery)}"/>
-                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}" styleClass="facetLink facetSelected" rendered="#{not empty friendlyNames
+                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}" styleClass="facetLink facetSelected" rendered="#{not empty friendlyNames
                                 and filterType != DataversePage.searchFieldType and filterType != DataversePage.searchFieldSubtree}">
                         <h:outputText value="#{friendlyNames.get(0)}: "/>
                         <h:outputText value="#{friendlyNames.get(1)}">
@@ -396,7 +416,7 @@
                         </button>
                         <ul class="dropdown-menu pull-right text-left">
                             <li>
-                                <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                                <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                                     <h:outputText value="#{bundle['dataverse.results.btn.sort.option.nameAZ']}" styleClass="#{SearchIncludeFragment.sortedByNameAsc ? 'highlightBold' : ''}"/>
                                     <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                                     <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -409,7 +429,7 @@
                                 </h:outputLink>
                             </li>
                             <li>
-                                <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                                <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                                     <h:outputText value="#{bundle['dataverse.results.btn.sort.option.nameZA']}" styleClass="#{SearchIncludeFragment.sortedByNameDesc ? 'highlightBold' : ''}"/>
                                     <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                                     <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -422,7 +442,7 @@
                                 </h:outputLink>
                             </li>
                             <li>
-                                <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                                <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                                     <h:outputText value="#{bundle['dataverse.results.btn.sort.option.newest']}" styleClass="#{SearchIncludeFragment.sortedByReleaseDateDesc? 'highlightBold' : ''}"/>
                                     <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                                     <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -435,7 +455,7 @@
                                 </h:outputLink>
                             </li>
                             <li>
-                                <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                                <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                                     <h:outputText value="#{bundle['dataverse.results.btn.sort.option.oldest']}" styleClass="#{SearchIncludeFragment.sortedByReleaseDateAsc? 'highlightBold' : ''}"/>
                                     <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                                     <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -449,7 +469,7 @@
                             </li>
                             <ui:fragment rendered="#{SearchIncludeFragment.mode == SearchIncludeFragment.searchModeString}">
                                 <li>
-                                    <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                                    <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                                         <h:outputText value="#{bundle['dataverse.results.btn.sort.option.relevance']}" styleClass="#{SearchIncludeFragment.sortedByRelevance ? 'highlightBold' : ''}"/>
                                         <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                                         <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -495,7 +515,7 @@
                             <h:outputText value="#{bundle['dataset.versionUI.unpublished']}" styleClass="label label-warning" rendered="#{result.unpublishedState}"/>
                         </div>
                         <div class="card-preview-icon-block text-center">
-                            <h:outputLink value="#{!SearchIncludeFragment.rootDv and !result.isInTree ? dvUrl : widgetWrapper.wrapURL(dvUrl)}" target="#{!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView ? '_blank' : ''}">
+                            <h:outputLink rel="nofollow" value="#{!SearchIncludeFragment.rootDv and !result.isInTree ? dvUrl : widgetWrapper.wrapURL(dvUrl)}" target="#{!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView ? '_blank' : ''}">
                                 <img src="#{result.imageUrl}" jsf:rendered="#{!empty result.imageUrl}" alt="#{of:format1(bundle['alt.logo'], result.name)}"/>
                                 <span class="icon-dataverse text-brand" jsf:rendered="#{empty result.imageUrl}"/>
                                 <f:passThroughAttribute name="aria-label" value="#{of:format1(bundle['alt.logo'], result.name)}"/>
@@ -505,7 +525,7 @@
                         <h:outputText value="#{result.dateToDisplayOnCard}" styleClass="text-muted" style="margin-right:.5em;"/>
 
                         <c:set var="dvParentUrl" value="/dataverse/#{result.dataverseParentAlias}"/>
-                        <h:outputLink value="#{!SearchIncludeFragment.rootDv and !result.isInTree ? dvParentUrl : widgetWrapper.wrapURL(dvParentUrl)}" target="#{!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView ? '_blank' : ''}" rendered="#{result.parent.id != SearchIncludeFragment.dataverse.id}">
+                        <h:outputLink rel="nofollow" value="#{!SearchIncludeFragment.rootDv and !result.isInTree ? dvParentUrl : widgetWrapper.wrapURL(dvParentUrl)}" target="#{!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView ? '_blank' : ''}" rendered="#{result.parent.id != SearchIncludeFragment.dataverse.id}">
                             <h:outputText value="#{result.parent.get('name')}"/>
                         </h:outputLink>
 
@@ -555,9 +575,10 @@
                             <h:outputText value="#{bundle['embargoed']}" styleClass="label label-primary" rendered="#{SearchIncludeFragment.isActivelyEmbargoed(result)}"/>
                             <h:outputText value="#{DatasetUtil:getLocaleExternalStatus(result.externalStatus)}" styleClass="label label-info" rendered="#{!empty result.externalStatus and SearchIncludeFragment.canPublishDataset(result.entityId)}"/>
                             <h:outputText value="#{result.userRole}" styleClass="label label-primary" rendered="#{!empty result.userRole}"/>
+                            <h:outputText value="#{bundle['incomplete']}" styleClass="label label-danger" rendered="#{!SearchIncludeFragment.isValid(result)}"/>
                         </div>
                         <div class="card-preview-icon-block text-center">
-                            <a href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? result.datasetUrl : widgetWrapper.wrapURL(result.datasetUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}" aria-label="#{result.title}">
+                            <a rel="nofollow" href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? result.datasetUrl : widgetWrapper.wrapURL(result.datasetUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}" aria-label="#{result.title}">
                                 <img src="#{result.imageUrl}" jsf:rendered="#{!empty result.imageUrl and !result.deaccessionedState}" alt="#{result.title}"/>
                                 <span class="icon-dataset" jsf:rendered="#{empty result.imageUrl or result.deaccessionedState}"/>
                             </a>
@@ -568,7 +589,7 @@
                         <h:outputText styleClass="text-muted" value=" - " rendered="#{result.parent.id != SearchIncludeFragment.dataverse.id}"/>
                         
                         <c:set var="dvParentAlias" value="/dataverse/#{result.dataverseAlias}"/>
-                        <h:outputLink value="#{!SearchIncludeFragment.rootDv and !result.isInTree ? dvParentAlias : widgetWrapper.wrapURL(dvParentAlias)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}" rendered="#{result.parent.id != SearchIncludeFragment.dataverse.id}">
+                        <h:outputLink rel="nofollow" value="#{!SearchIncludeFragment.rootDv and !result.isInTree ? dvParentAlias : widgetWrapper.wrapURL(dvParentAlias)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}" rendered="#{result.parent.id != SearchIncludeFragment.dataverse.id}">
                             <h:outputText value="#{result.parent.get('name')}"/>
                         </h:outputLink>
 
@@ -620,7 +641,7 @@
 
                             <span class="text-danger glyphicon glyphicon-lock" data-toggle="tooltip" title="#{bundle.restricted}" jsf:rendered="#{result.entity.restricted and !permissionsWrapper.hasDownloadFilePermission(result.entity)}"/>
                             <span class="text-success icon-unlock" data-toggle="tooltip" title="#{bundle.restrictedaccess}" jsf:rendered="#{result.entity.restricted and permissionsWrapper.hasDownloadFilePermission(result.entity)}"/>
-                            <a href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? (result.harvested ? result.fileDatasetUrl : result.fileUrl) : widgetWrapper.wrapURL(result.harvested ? result.fileDatasetUrl : result.fileUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}">
+                            <a rel="nofollow" href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? (result.harvested ? result.fileDatasetUrl : result.fileUrl) : widgetWrapper.wrapURL(result.harvested ? result.fileDatasetUrl : result.fileUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}">
                                 <h:outputText value="#{result.name}" style="padding:4px 0;" rendered="#{result.nameHighlightSnippet == null}"/>
                                 <h:outputText value="#{result.nameHighlightSnippet}" style="padding:4px 0;" rendered="#{result.nameHighlightSnippet != null}" escape="false"/>
                                 <h:outputText value=" (#{result.entityId})" style="padding:4px 0;" rendered="#{dataverseSession.debug}"/></a>
@@ -632,7 +653,7 @@
                         </div>
 
                         <div class="card-preview-icon-block text-center">
-                            <a href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? (result.harvested ? result.fileDatasetUrl : result.fileUrl) : widgetWrapper.wrapURL(result.harvested ? result.fileDatasetUrl : result.fileUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}" aria-label="#{result.name}">
+                            <a rel="nofollow" href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? (result.harvested ? result.fileDatasetUrl : result.fileUrl) : widgetWrapper.wrapURL(result.harvested ? result.fileDatasetUrl : result.fileUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}" aria-label="#{result.name}">
                                 <img src="#{result.imageUrl}" jsf:rendered="#{!empty result.imageUrl}" alt="#{result.name}"/>
                                 <span class="icon-#{dataFileServiceBean.getFileThumbnailClass(result.entity)} text-muted h1"
                                       jsf:rendered="#{empty result.imageUrl}"/>
@@ -641,7 +662,7 @@
 
                         <div class="text-muted">
                             <h:outputText value="#{result.dateToDisplayOnCard} - "/>
-                            <a href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? result.fileDatasetUrl : widgetWrapper.wrapURL(result.fileDatasetUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}">
+                            <a rel="nofollow" href="#{!SearchIncludeFragment.rootDv and !result.isInTree ? result.fileDatasetUrl : widgetWrapper.wrapURL(result.fileDatasetUrl)}" target="#{(!SearchIncludeFragment.rootDv and !result.isInTree and widgetWrapper.widgetView) or result.harvested ? '_blank' : ''}">
                                 <h:outputText value="#{result.parent.get('name')}"/></a>
                         </div>
 
diff --git a/src/main/webapp/search-include-pager.xhtml b/src/main/webapp/search-include-pager.xhtml
index ad8fb316dda..88714099c00 100644
--- a/src/main/webapp/search-include-pager.xhtml
+++ b/src/main/webapp/search-include-pager.xhtml
@@ -15,7 +15,7 @@
                 <ul class="pagination">
                     <!--should take you to page 1-->
                     <li class="#{SearchIncludeFragment.page == '1' ? 'disabled' : ''}">
-                        <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                        <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                             <h:outputText value="&#171;"/>
                             <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                             <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -29,7 +29,7 @@
                         </h:outputLink>
                     </li>
                     <li class="#{SearchIncludeFragment.page == '1' ? 'disabled' : ''}">
-                        <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                        <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                             <h:outputText value="&lt; #{bundle.previous}"/>
                             <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                             <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -46,7 +46,7 @@
                                end="#{Math:min(SearchIncludeFragment.totalPages,SearchIncludeFragment.page+Math:max(2,5-SearchIncludeFragment.page))}"
                                varStatus="pageStatus">
                         <li class="#{SearchIncludeFragment.page == pageStatus.index ? 'active' : ''}">
-                            <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                            <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                                 <h:outputText value="#{pageStatus.index}">
                                     <f:convertNumber pattern="###,###" />
                                 </h:outputText>
@@ -65,7 +65,7 @@
                         </li>
                     </c:forEach>
                     <li class="#{SearchIncludeFragment.page == SearchIncludeFragment.totalPages ? 'disabled' : ''}">
-                        <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                        <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                             <h:outputText value="#{bundle.next} &gt;"/>
                             <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                             <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
@@ -79,7 +79,7 @@
                         </h:outputLink>
                     </li>
                     <li class="#{SearchIncludeFragment.page == SearchIncludeFragment.totalPages ? 'disabled' : ''}">
-                        <h:outputLink value="#{widgetWrapper.wrapURL(page)}">
+                        <h:outputLink rel="nofollow" value="#{widgetWrapper.wrapURL(page)}">
                             <h:outputText value="&#187;"/>
                             <f:param name="q" value="#{SearchIncludeFragment.query}"/>
                             <c:forEach items="#{SearchIncludeFragment.filterQueries}" var="clickedFilterQuery" varStatus="status">
diff --git a/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java
index ad97eba137c..30bd260eb1b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBeanTest.java
@@ -2,16 +2,14 @@
 
 import java.util.Arrays;
 import java.util.List;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import javax.persistence.TypedQuery;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
-import org.junit.Before;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.ArgumentMatchers;
-import org.mockito.Matchers;
 
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -26,7 +24,7 @@ public class AuxiliaryFileServiceBeanTest {
     List<String> types;
     DataFile dataFile;
 
-    @Before
+    @BeforeEach
     public void setup() {
         svc = new AuxiliaryFileServiceBean();
         svc.em = mock(EntityManager.class);
diff --git a/src/test/java/edu/harvard/iq/dataverse/CartTest.java b/src/test/java/edu/harvard/iq/dataverse/CartTest.java
index 7b7ea3331a9..e847cfb3346 100644
--- a/src/test/java/edu/harvard/iq/dataverse/CartTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/CartTest.java
@@ -1,15 +1,15 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class CartTest {
 
@@ -17,14 +17,14 @@ public class CartTest {
     private String title;
     private String persistentId;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.cart = new Cart();
         this.title = "title";
         this.persistentId = "persistentId";
     }
 
-    @After
+    @AfterEach
     public void tearDwon() {
         this.cart = null;
         this.title = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java b/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java
index dd2c242c086..4097adb0be6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataCitationTest.java
@@ -379,6 +379,7 @@ public void testTitleWithQuotes() throws ParseException {
     }
 
     private DatasetVersion createATestDatasetVersion(String withTitle, boolean withAuthor) throws ParseException {
+        
         Dataverse dataverse = new Dataverse();
         dataverse.setName("LibraScholar");
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java
index edeeea288bf..53add343f4f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataFileCategoryServiceBeanTest.java
@@ -4,12 +4,14 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 import java.util.Arrays;
 import java.util.List;
@@ -19,7 +21,7 @@
  * 
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class DataFileCategoryServiceBeanTest {
 
     @Mock
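
The @RunWith(MockitoJUnitRunner.class) → @ExtendWith(MockitoExtension.class) swap above (also applied to EditDataFilesPageHelperTest and PersistentIdentifierServiceBeanTest later in this diff) is the standard mockito-junit-jupiter idiom. Below is a self-contained sketch under hypothetical names (GreeterTest, MessageSource); it is illustrative only and not part of the patch.

    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.Mock;
    import org.mockito.Mockito;
    import org.mockito.junit.jupiter.MockitoExtension;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    @ExtendWith(MockitoExtension.class)   // was: @RunWith(MockitoJUnitRunner.class)
    class GreeterTest {                   // hypothetical test, for illustration only

        // Collaborator defined inline so the sketch compiles on its own.
        interface MessageSource {
            String get(String key);
        }

        @Mock
        private MessageSource messages;   // initialized by MockitoExtension, no runner needed

        @Test
        void mockIsInjectedByTheExtension() {
            Mockito.when(messages.get("hello")).thenReturn("Hello");
            assertEquals("Hello", messages.get("hello"));
        }
    }
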
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java
index 136916cf449..ab3d0f8ef55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataFileServiceBeanTest.java
@@ -1,8 +1,9 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Test that the DataFileServiceBean classifies DataFiles correctly.
@@ -27,7 +28,7 @@ public DataFileServiceBeanTest() {
     private DataFileServiceBean dataFileServiceBean;
             
     
-    @Before
+    @BeforeEach
     public void setUp() {
         fileWoContentType = createDataFile(null);
         fileWithBogusContentType = createDataFile("foo/bar");
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java
index fddb95eda9e..fa225de18ba 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetAuthorTest.java
@@ -1,46 +1,26 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-import java.util.Arrays;
-import java.util.Collection;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Parameterized.class)
 public class DatasetAuthorTest {
 
-    public String idType;
-    public String idValue;
-    public String expectedIdentifierAsUrl;
-
-    public DatasetAuthorTest(String idType, String idValue, String expectedIdentifierAsUrl) {
-        this.idType = idType;
-        this.idValue = idValue;
-        this.expectedIdentifierAsUrl = expectedIdentifierAsUrl;
-    }
-
-    @Parameters
-    public static Collection<String[]> parameters() {
-        return Arrays.asList(new String[][] {
-            { "ORCID", "0000-0002-1825-0097", "https://orcid.org/0000-0002-1825-0097" },
-            { "ISNI", "0000000121032683", "http://www.isni.org/isni/0000000121032683"},
-            { "LCNA", "n82058243", "http://id.loc.gov/authorities/names/n82058243" },
-            { "VIAF", "172389567", "https://viaf.org/viaf/172389567" },
-            { "GND", "4079154-3", "https://d-nb.info/gnd/4079154-3" },
-            { "ResearcherID", "634082", "https://publons.com/researcher/634082/" },
-            { "ResearcherID", "AAW-9289-2021", "https://publons.com/researcher/AAW-9289-2021/" },
-            { "ResearcherID", "J-9733-2013", "https://publons.com/researcher/J-9733-2013/" },
-            { "ScopusID", "6602344670", "https://www.scopus.com/authid/detail.uri?authorId=6602344670" },
-            { null, null, null, },
-        });
-    }
-
-    @Test
-    public void getIdentifierAsUrl() {
+    @ParameterizedTest
+    @CsvSource(value = {
+        "ORCID,0000-0002-1825-0097,https://orcid.org/0000-0002-1825-0097",
+        "ISNI,0000000121032683,http://www.isni.org/isni/0000000121032683",
+        "LCNA,n82058243,http://id.loc.gov/authorities/names/n82058243",
+        "VIAF,172389567,https://viaf.org/viaf/172389567",
+        "GND,4079154-3,https://d-nb.info/gnd/4079154-3",
+        "ResearcherID,634082,https://publons.com/researcher/634082/",
+        "ResearcherID,AAW-9289-2021,https://publons.com/researcher/AAW-9289-2021/",
+        "ResearcherID,J-9733-2013,https://publons.com/researcher/J-9733-2013/",
+        "ScopusID,6602344670,https://www.scopus.com/authid/detail.uri?authorId=6602344670",
+        "NULL,NULL,NULL"
+    }, nullValues = "NULL")
+    void getIdentifierAsUrl(String idType, String idValue, String expectedIdentifierAsUrl) {
         DatasetAuthor datasetAuthor = new DatasetAuthor();
         if (idType !=null && idValue != null) {
             datasetAuthor.setIdType(idType);
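
The DatasetAuthorTest rewrite above folds the JUnit 4 Parameterized runner (constructor plus @Parameters collection) into a single @ParameterizedTest backed by a @CsvSource whose nullValues option maps the literal NULL token to a real null. A minimal sketch of the same idiom follows; the URL logic inside it is a stand-in for illustration, not the Dataverse implementation.

    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.CsvSource;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertNull;

    class IdentifierUrlSketchTest {       // hypothetical test, for illustration only

        // Each CSV row becomes one invocation; the literal NULL token is mapped to a real null.
        @ParameterizedTest
        @CsvSource(value = {
            "ORCID,0000-0002-1825-0097,https://orcid.org/0000-0002-1825-0097",
            "NULL,NULL,NULL"
        }, nullValues = "NULL")
        void buildsUrlOrReturnsNull(String idType, String idValue, String expectedUrl) {
            // Stand-in for the real per-scheme lookup in DatasetAuthor.
            String url = (idType == null || idValue == null) ? null : "https://orcid.org/" + idValue;
            if (expectedUrl == null) {
                assertNull(url);
            } else {
                assertEquals(expectedUrl, url);
            }
        }
    }
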
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java
new file mode 100644
index 00000000000..999fadaae06
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java
@@ -0,0 +1,47 @@
+package edu.harvard.iq.dataverse;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+import org.junit.jupiter.api.BeforeEach;
+
+public class DatasetFieldDefaultValueTest {
+    private DatasetFieldDefaultValue dataverseContact;
+
+    @BeforeEach
+    public void before() {
+        this.dataverseContact = new DatasetFieldDefaultValue();
+        this.dataverseContact.setId(1L);
+    }
+
+    @Test
+    public void testEqualsWithNull() {
+        assertFalse(this.dataverseContact.equals(null));
+    }
+
+    @Test
+    public void testEqualsWithDifferentClass() {
+        DatasetField datasetField = new DatasetField();
+
+        assertFalse(this.dataverseContact.equals(datasetField));
+    }
+
+    @Test
+    public void testEqualsWithSameClassSameId() {
+        DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue();
+        dataverseContact1.setId(1L);
+
+        assertTrue(this.dataverseContact.equals(dataverseContact1));
+    }
+
+    @Test
+    public void testEqualsWithSameClassDifferentId() {
+        DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue();
+        dataverseContact1.setId(2L);
+
+        assertFalse(this.dataverseContact.equals(dataverseContact1));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
index ed17bd229d9..a235c9b0061 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTypeTest.java
@@ -6,16 +6,13 @@
 package edu.harvard.iq.dataverse;
 
 import edu.harvard.iq.dataverse.search.SolrField;
-import java.util.Collection;
-import java.util.List;
-import java.util.Set;
-import javax.faces.model.SelectItem;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -26,19 +23,19 @@ public class DatasetFieldTypeTest {
     public DatasetFieldTypeTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java
index 99482dd9401..659e42a68c4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValidatorTest.java
@@ -5,13 +5,14 @@
  */
 package edu.harvard.iq.dataverse;
 
-import javax.validation.ConstraintValidatorContext;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import jakarta.validation.ConstraintValidatorContext;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+
 import org.mockito.Mockito;
 
 /**
@@ -25,25 +26,26 @@ public class DatasetFieldValidatorTest {
     public DatasetFieldValidatorTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
 
     /**
      * Test of isValid method, of class DatasetFieldValidator.
+     * TODO: this should be converted into one or two ParameterizedTest methods, potentially including a DisplayNameGenerator
      */
     @Test
     public void testIsValid() {
@@ -84,7 +86,7 @@ private void testPrimitiveDatasetField(String test, boolean required, String val
         testDatasetField.setSingleValue(value);
         
         DatasetFieldValidator datasetFieldValidator = new DatasetFieldValidator();
-        assertEquals( test, expectedOutcome, datasetFieldValidator.isValid(testDatasetField, constraintValidatorContext));
+        assertEquals(expectedOutcome, datasetFieldValidator.isValid(testDatasetField, constraintValidatorContext), test);
        
     }
       
@@ -120,7 +122,7 @@ private void testCompoundDatasetField(String test, boolean requiredParent, boole
         
 
         DatasetFieldValidator datasetFieldValidator = new DatasetFieldValidator();
-        assertEquals( test, expectedOutcome, datasetFieldValidator.isValid(child1DatasetField, constraintValidatorContext));
+        assertEquals(expectedOutcome, datasetFieldValidator.isValid(child1DatasetField, constraintValidatorContext), test);
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
index ceaa69ade4e..3f85acc1f87 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldValueValidatorTest.java
@@ -7,10 +7,10 @@
 
 import java.util.Set;
 import java.util.regex.Pattern;
-import javax.validation.ConstraintValidatorContext;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintValidatorContext;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -213,4 +213,19 @@ public void testInvalidEmail() {
             assertTrue(c.getMessage().contains("email"));
         });
     }
+    @Test
+    public void testBoundingBoxValidity() {
+        // valid tests
+        assertTrue(DatasetFieldValueValidator.validateBoundingBox("-180", "180", "90", "-90"));
+        assertTrue(DatasetFieldValueValidator.validateBoundingBox("0", "0", "0", "0"));
+
+        // invalid tests
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox("-180", null, "90", null));
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox(null, "180", null, "90"));
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox("-180", "180", "90", "junk"));
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox("45", "40", "90", "0"));
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox("360", "0", "90", "-90"));
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox("", "", "", ""));
+        assertTrue(!DatasetFieldValueValidator.validateBoundingBox(null, null, null, null));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
index 45e47460ffe..2153a336303 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetTest.java
@@ -3,10 +3,11 @@
 import edu.harvard.iq.dataverse.DatasetVersion.VersionState;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -23,38 +24,38 @@ public class DatasetTest {
     @Test
     public void testIsLockedFor() {
         Dataset sut = new Dataset();
-        assertFalse( "Initially verify that the dataset is not locked because data being ingested", sut.isLockedFor(DatasetLock.Reason.Ingest) );
+        assertFalse(sut.isLockedFor(DatasetLock.Reason.Ingest), "Initially verify that the dataset is not locked because data being ingested");
 
         DatasetLock dl = new DatasetLock(DatasetLock.Reason.Ingest, MocksFactory.makeAuthenticatedUser("jane", "doe"));
         sut.addLock(dl);
-        assertTrue( "Verify that the dataset now has an ingest lock", sut.isLockedFor(DatasetLock.Reason.Ingest) );
-        assertFalse( "Verify that the dataset does not have a workflow lock", sut.isLockedFor(DatasetLock.Reason.Workflow) );
+        assertTrue(sut.isLockedFor(DatasetLock.Reason.Ingest), "Verify that the dataset now has an ingest lock");
+        assertFalse(sut.isLockedFor(DatasetLock.Reason.Workflow), "Verify that the dataset does not have a workflow lock");
     }
     
     @Test
     public void testLocksManagement() {
         Dataset sut = new Dataset();
-        assertFalse( "Initially verify that the dataset is not locked", sut.isLocked() );
+        assertFalse(sut.isLocked(), "Initially verify that the dataset is not locked");
         
         DatasetLock dlIngest = new DatasetLock(DatasetLock.Reason.Ingest, MocksFactory.makeAuthenticatedUser("jane", "doe"));
         dlIngest.setId(MocksFactory.nextId());
         sut.addLock(dlIngest);
-        assertTrue( "After adding an ingest lock, verify that the dataset is locked", sut.isLocked() );
+        assertTrue(sut.isLocked(), "After adding an ingest lock, verify that the dataset is locked");
 
         final DatasetLock dlInReview = new DatasetLock(DatasetLock.Reason.InReview, MocksFactory.makeAuthenticatedUser("jane", "doe"));
         dlInReview.setId(MocksFactory.nextId());
         sut.addLock(dlInReview);
-        assertEquals( "After adding a review lock, verify that the dataset is locked by two locks", 2, sut.getLocks().size() );
+        assertEquals(2, sut.getLocks().size(), "After adding a review lock, verify that the dataset is locked by two locks");
         
         DatasetLock retrievedDl = sut.getLockFor(DatasetLock.Reason.Ingest);
         assertEquals( dlIngest, retrievedDl );
         sut.removeLock(dlIngest);
-        assertNull( "After removing the ingest lock, verify that the dataset does not have any ingest locks", sut.getLockFor(DatasetLock.Reason.Ingest) );
+        assertNull(sut.getLockFor(DatasetLock.Reason.Ingest), "After removing the ingest lock, verify that the dataset does not have any ingest locks");
         
-        assertTrue( "After removing the ingest lock, verify that the dataset is still locked (review lock)", sut.isLocked() );
+        assertTrue(sut.isLocked(), "After removing the ingest lock, verify that the dataset is still locked (review lock)");
         
         sut.removeLock(dlInReview);
-        assertFalse( "After removing the review lock, verify that the dataset is not locked anymore", sut.isLocked() );
+        assertFalse(sut.isLocked(), "After removing the review lock, verify that the dataset is not locked anymore");
         
     }
 
@@ -78,7 +79,7 @@ public void testLocksManagement() {
     private DatasetVersion draftVersion;
     private DatasetVersion releasedVersion;
 
-    @Before
+    @BeforeEach
     public void before() {
         this.archivedVersion = new DatasetVersion();
         this.archivedVersion.setVersionState(VersionState.ARCHIVED);
@@ -93,7 +94,7 @@ public void before() {
         this.releasedVersion.setVersionState(VersionState.RELEASED);
     }
 
-    @After
+    @AfterEach
     public void after() {
         this.archivedVersion = null;
         this.deaccessionedVersion = null;
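
Beyond the imports, the only mechanical change the assertion rewrites in DatasetTest above require is argument order: in JUnit Jupiter the optional failure message moves from the first parameter to the last. A one-method illustration, assuming nothing beyond the Jupiter Assertions API:

    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    class AssertionMessageOrderTest {     // hypothetical test, for illustration only

        @Test
        void messageIsNowTheLastParameter() {
            int actual = 2 + 2;
            // JUnit 4: assertEquals("sum should be 4", 4, actual);
            assertEquals(4, actual, "sum should be 4");
            // JUnit 4: assertTrue("sum should be even", actual % 2 == 0);
            assertTrue(actual % 2 == 0, "sum should be even");
        }
    }
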
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java
index 7c608888b7b..424ff54fe02 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionServiceBeanTest.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import java.util.Arrays;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
@@ -18,13 +18,13 @@ public class DatasetVersionServiceBeanTest {
 
   private DatasetVersionServiceBean datasetVersionServiceBean;
 
-  @Before
+  @BeforeEach
   public void setUp() {
     this.datasetVersionServiceBean = new DatasetVersionServiceBean();
     
   }
 
-  @After
+  @AfterEach
   public void tearDown() {
     this.datasetVersionServiceBean = null;
   }
diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
index a8e011d0036..4cd6c4dfaa7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DatasetVersionTest.java
@@ -14,10 +14,10 @@
 import java.util.Date;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java
new file mode 100644
index 00000000000..2abb10a485d
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java
@@ -0,0 +1,47 @@
+package edu.harvard.iq.dataverse;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+import org.junit.jupiter.api.BeforeEach;
+
+public class DataverseContactTest {
+    private DataverseContact dataverseContact;
+
+    @BeforeEach
+    public void before() {
+        this.dataverseContact = new DataverseContact();
+        this.dataverseContact.setId(1L);
+    }
+
+    @Test
+    public void testEqualsWithNull() {
+        assertFalse(this.dataverseContact.equals(null));
+    }
+
+    @Test
+    public void testEqualsWithDifferentClass() {
+        DatasetFieldType datasetFieldType = new DatasetFieldType();
+
+        assertFalse(this.dataverseContact.equals(datasetFieldType));
+    }
+
+    @Test
+    public void testEqualsWithSameClassSameId() {
+        DataverseContact dataverseContact1 = new DataverseContact();
+        dataverseContact1.setId(1L);
+
+        assertTrue(this.dataverseContact.equals(dataverseContact1));
+    }
+
+    @Test
+    public void testEqualsWithSameClassDifferentId() {
+        DataverseContact dataverseContact1 = new DataverseContact();
+        dataverseContact1.setId(2L);
+
+        assertFalse(this.dataverseContact.equals(dataverseContact1));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java
new file mode 100644
index 00000000000..7ae50439c10
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java
@@ -0,0 +1,47 @@
+package edu.harvard.iq.dataverse;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+import org.junit.jupiter.api.BeforeEach;
+
+public class DataverseFacetTest {
+    private DataverseFacet dataverseFacet;
+
+    @BeforeEach
+    public void before() {
+        this.dataverseFacet = new DataverseFacet();
+        this.dataverseFacet.setId(1L);
+    }
+
+    @Test
+    public void testEqualsWithNull() {
+        assertFalse(this.dataverseFacet.equals(null));
+    }
+
+    @Test
+    public void testEqualsWithDifferentClass() {
+        DatasetFieldType datasetFieldType = new DatasetFieldType();
+
+        assertFalse(this.dataverseFacet.equals(datasetFieldType));
+    }
+
+    @Test
+    public void testEqualsWithSameClassSameId() {
+        DataverseFacet dataverseFacet1 = new DataverseFacet();
+        dataverseFacet1.setId(1L);
+
+        assertTrue(this.dataverseFacet.equals(dataverseFacet1));
+    }
+
+    @Test
+    public void testEqualsWithSameClassDifferentId() {
+        DataverseFacet dataverseFacet1 = new DataverseFacet();
+        dataverseFacet1.setId(2L);
+
+        assertFalse(this.dataverseFacet.equals(dataverseFacet1));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java
new file mode 100644
index 00000000000..b024dc3bfd3
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java
@@ -0,0 +1,47 @@
+package edu.harvard.iq.dataverse;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+import org.junit.jupiter.api.BeforeEach;
+
+public class DataverseFeaturedDataverseTest {
+    private DataverseFeaturedDataverse dataverseFeaturedDataverse;
+
+    @BeforeEach
+    public void before() {
+        this.dataverseFeaturedDataverse = new DataverseFeaturedDataverse();
+        this.dataverseFeaturedDataverse.setId(1L);
+    }
+
+    @Test
+    public void testEqualsWithNull() {
+        assertFalse(this.dataverseFeaturedDataverse.equals(null));
+    }
+
+    @Test
+    public void testEqualsWithDifferentClass() {
+        DatasetFieldType datasetFieldType = new DatasetFieldType();
+
+        assertFalse(this.dataverseFeaturedDataverse.equals(datasetFieldType));
+    }
+
+    @Test
+    public void testEqualsWithSameClassSameId() {
+        DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse();
+        dataverseFeaturedDataverse1.setId(1L);
+
+        assertTrue(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1));
+    }
+
+    @Test
+    public void testEqualsWithSameClassDifferentId() {
+        DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse();
+        dataverseFeaturedDataverse1.setId(2L);
+
+        assertFalse(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java
index 7ae2d26a113..e0bcfb2369d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseMetadataBlockFacetTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java
index cb0561dd0f4..4e2bd5b3c2d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseTest.java
@@ -3,8 +3,8 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.List;
@@ -18,7 +18,7 @@ public class DataverseTest {
     private Dataverse OWNER;
     private List<DataverseMetadataBlockFacet> OWNER_METADATABLOCKFACETS;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         OWNER = new Dataverse();
         OWNER.setId(MocksFactory.nextId());
diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java
new file mode 100644
index 00000000000..e6721e34c6f
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java
@@ -0,0 +1,47 @@
+package edu.harvard.iq.dataverse;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+
+import org.junit.jupiter.api.BeforeEach;
+
+public class DataverseThemeTest {
+    private DataverseTheme dataverseTheme;
+
+    @BeforeEach
+    public void before() {
+        this.dataverseTheme = new DataverseTheme();
+        this.dataverseTheme.setId(1L);
+    }
+
+    @Test
+    public void testEqualsWithNull() {
+        assertFalse(this.dataverseTheme.equals(null));
+    }
+
+    @Test
+    public void testEqualsWithDifferentClass() {
+        DatasetFieldType datasetFieldType = new DatasetFieldType();
+
+        assertFalse(this.dataverseTheme.equals(datasetFieldType));
+    }
+
+    @Test
+    public void testEqualsWithSameClassSameId() {
+        DataverseTheme dataverseTheme1 = new DataverseTheme();
+        dataverseTheme1.setId(1L);
+
+        assertTrue(this.dataverseTheme.equals(dataverseTheme1));
+    }
+
+    @Test
+    public void testEqualsWithSameClassDifferentId() {
+        DataverseTheme dataverseTheme1 = new DataverseTheme();
+        dataverseTheme1.setId(2L);
+
+        assertFalse(this.dataverseTheme.equals(dataverseTheme1));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java b/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java
index c95f7f105fa..39d43fec191 100644
--- a/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/EditDataFilesPageHelperTest.java
@@ -3,12 +3,12 @@
 import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 import java.util.Arrays;
 import java.util.Collections;
@@ -19,7 +19,7 @@
  *
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class EditDataFilesPageHelperTest {
 
     private static final String FILENAME = UUID.randomUUID().toString();
diff --git a/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java b/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java
index c14d2e4086e..dbd732d2e55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ExternalIdentifierTest.java
@@ -1,11 +1,9 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import java.util.regex.Pattern;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class ExternalIdentifierTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java
index 9d49dcdb070..5ff74aea603 100644
--- a/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/FileDirectoryNameValidatorTest.java
@@ -1,39 +1,25 @@
 package edu.harvard.iq.dataverse;
 
-import java.util.Arrays;
-import java.util.Collection;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-@RunWith(Parameterized.class)
-public class FileDirectoryNameValidatorTest {
-
-    public boolean isValid;
-    public String fileDirectoryName;
-
-    public FileDirectoryNameValidatorTest(boolean isValid, String fileDirectoryName) {
-        this.isValid = isValid;
-        this.fileDirectoryName = fileDirectoryName;
-    }
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-    @Parameterized.Parameters
-    public static Collection<Object[]> parameters() {
-        return Arrays.asList(new Object[][]{
-            {true, "foobar"},
-            // The leading "-" gets stripped.
-            {true, "-foobar"},
-            {true, "_foobar"},
-            {true, "foobar_"},
-            {true, "folder/sub"},
-            {true, "folder///sub"},
-            {true, "folder///sub/third"},
-            {false, "f**bar"},});
-    }
+public class FileDirectoryNameValidatorTest {
 
-    @Test
-    public void testIsFileDirectoryNameValid() {
+    @ParameterizedTest
+    @CsvSource({
+        "true,foobar",
+        // The leading "-" gets stripped.
+        "true,-foobar",
+        "true,_foobar",
+        "true,foobar_",
+        "true,folder/sub",
+        "true,folder///sub",
+        "true,folder///sub/third",
+        "false,f**bar"
+    })
+    public void testIsFileDirectoryNameValid(boolean isValid, String fileDirectoryName) {
         assertEquals(isValid, FileDirectoryNameValidator.isFileDirectoryNameValid(fileDirectoryName, null));
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java b/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java
index 023121f5cb2..394f08c6e93 100644
--- a/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/GlobalIdTest.java
@@ -1,9 +1,14 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Rule;
-import org.junit.rules.ExpectedException;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import edu.harvard.iq.dataverse.pidproviders.PidUtil;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -11,13 +16,10 @@
  */
 public class GlobalIdTest {
 
-    @Rule
-    public ExpectedException exception = ExpectedException.none();
-
     @Test
     public void testValidDOI() {
         System.out.println("testValidDOI");
-        GlobalId instance = new GlobalId("doi:10.5072/FK2/BYM3IW");
+        GlobalId instance = new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.5072","FK2/BYM3IW", "/", DOIServiceBean.DOI_RESOLVER_URL, null);
 
         assertEquals("doi", instance.getProtocol());
         assertEquals("10.5072", instance.getAuthority());
@@ -28,7 +30,7 @@ public void testValidDOI() {
     @Test
     public void testValidHandle() {
         System.out.println("testValidDOI");
-        GlobalId instance = new GlobalId("hdl:1902.1/111012");
+        GlobalId instance = new GlobalId(HandlenetServiceBean.HDL_PROTOCOL, "1902.1","111012", "/", HandlenetServiceBean.HDL_RESOLVER_URL, null);
 
         assertEquals("hdl", instance.getProtocol());
         assertEquals("1902.1", instance.getAuthority());
@@ -43,7 +45,7 @@ public void testContructFromDataset() {
         testDS.setAuthority("10.5072");
         testDS.setIdentifier("FK2/BYM3IW");
 
-        GlobalId instance = new GlobalId(testDS);
+        GlobalId instance = testDS.getGlobalId();
 
         assertEquals("doi", instance.getProtocol());
         assertEquals("10.5072", instance.getAuthority());
@@ -54,53 +56,53 @@ public void testContructFromDataset() {
     public void testInject() {
         System.out.println("testInject (weak test)");
 
-        String badProtocol = "hdl:'Select value from datasetfieldvalue';/ha";
+        // String badProtocol = "hdl:'Select value from datasetfieldvalue';/ha";
+        GlobalId instance = PidUtil.parseAsGlobalID(HandlenetServiceBean.HDL_PROTOCOL, "'Select value from datasetfieldvalue';", "ha");
+        assertNull(instance); 
 
-        GlobalId instance = new GlobalId(badProtocol);
-
-        assertEquals("hdl", instance.getProtocol());
-        assertEquals("Selectvaluefromdatasetfieldvalue", instance.getAuthority());
-        assertEquals("ha", instance.getIdentifier());
         //exception.expect(IllegalArgumentException.class);
         //exception.expectMessage("Failed to parse identifier: " + badProtocol);
         //new GlobalId(badProtocol);
     }
 
     @Test
+    @Disabled /* A 'doy' protocol could now be added, so this test would need to check against the registered PIDProviders (currently Beans). */
     public void testUnknownProtocol() {
         System.out.println("testUnknownProtocol");
 
         String badProtocol = "doy:10.5072/FK2/BYM3IW";
-
-        exception.expect(IllegalArgumentException.class);
-        exception.expectMessage("Failed to parse identifier: " + badProtocol);
-        new GlobalId(badProtocol);
+        
+        //exception.expect(IllegalArgumentException.class);
+        //exception.expectMessage("Failed to parse identifier: " + badProtocol);
+        //new GlobalId(badProtocol);
     }
 
     @Test
+    @Disabled /* Parsing rules could now change, so this test would need to check against the registered PIDProviders (currently Beans). */
     public void testBadIdentifierOnePart() {
         System.out.println("testBadIdentifierOnePart");
 
-        exception.expect(IllegalArgumentException.class);
-        exception.expectMessage("Failed to parse identifier: 1part");
-        new GlobalId("1part");
+        //exception.expect(IllegalArgumentException.class);
+        //exception.expectMessage("Failed to parse identifier: 1part");
+        //new GlobalId("1part");
     }
 
     @Test
+    @Disabled /* Parsing rules could now change, so this test would need to check against the registered PIDProviders (currently Beans). */
     public void testBadIdentifierTwoParts() {
         System.out.println("testBadIdentifierTwoParts");
 
-        exception.expect(IllegalArgumentException.class);
-        exception.expectMessage("Failed to parse identifier: doi:2part/blah");
-        new GlobalId("doi:2part/blah");
+        //exception.expect(IllegalArgumentException.class);
+        //exception.expectMessage("Failed to parse identifier: doi:2part/blah");
+        //new GlobalId("doi:2part/blah");
     }
 
     @Test
     public void testIsComplete() {
-        assertFalse(new GlobalId("doi", "10.123", null).isComplete());
-        assertFalse(new GlobalId("doi", null, "123").isComplete());
-        assertFalse(new GlobalId(null, "10.123", "123").isComplete());
-        assertTrue(new GlobalId("doi", "10.123", "123").isComplete());
+        assertFalse(new GlobalId("doi", "10.123", null, null, null, null).isComplete());
+        assertFalse(new GlobalId("doi", null, "123", null, null, null).isComplete());
+        assertFalse(new GlobalId(null, "10.123", "123", null, null, null).isComplete());
+        assertTrue(new GlobalId("doi", "10.123", "123", null, null, null).isComplete());
     }
 
     @Test
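
JUnit Jupiter has no @Rule support, so GlobalIdTest above drops the ExpectedException rule and simply disables the affected parsing tests. Where exception expectations are kept elsewhere in this patch (for example AbstractApiBeanTest), the replacement idiom is assertThrows. A minimal sketch of that idiom using a JDK exception, not the disabled GlobalId checks themselves:

    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertThrows;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    class ExpectedExceptionReplacementTest {   // hypothetical test, for illustration only

        @Test
        void assertThrowsReplacesTheExpectedExceptionRule() {
            // JUnit 4: exception.expect(NumberFormatException.class);
            //          exception.expectMessage("not a number");
            NumberFormatException ex = assertThrows(NumberFormatException.class,
                    () -> Integer.parseInt("not a number"));
            assertTrue(ex.getMessage().contains("not a number"));
        }
    }
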
diff --git a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java
index 025e3fbb3f7..32bf9702ee7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/MailServiceBeanTest.java
@@ -7,11 +7,11 @@
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.CsvSource;
 
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.InternetAddress;
 
 import java.io.UnsupportedEncodingException;
 
-import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 class MailServiceBeanTest {
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java b/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java
index 85aaa37bb30..8644de89709 100644
--- a/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/MetadataBlockTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.UUID;
diff --git a/src/test/java/edu/harvard/iq/dataverse/NonEssentialTests.java b/src/test/java/edu/harvard/iq/dataverse/NonEssentialTests.java
deleted file mode 100644
index 612904cbd26..00000000000
--- a/src/test/java/edu/harvard/iq/dataverse/NonEssentialTests.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package edu.harvard.iq.dataverse;
-
-/**
- * Tests annotated as non-essential will not be run by default on developers'
- * laptops but they will run on continuous integration platforms like Travis CI.
- * To work on one of these tests, you have to comment out the annotation.
- */
-public interface NonEssentialTests {
-
-}
diff --git a/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java b/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java
index 7ebc7e3c807..751a90fb447 100644
--- a/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/PermissionsWrapperTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse;
 
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mock;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
@@ -19,14 +19,14 @@ public class PermissionsWrapperTest {
 
     private PermissionsWrapper permissionWrapper;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.permissionWrapper = new PermissionsWrapper();
         this.permissionWrapper.permissionService = mock(PermissionServiceBean.class);
         this.permissionWrapper.dvRequestService = mock(DataverseRequestServiceBean.class);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         this.permissionWrapper = null;
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java
index b49c1cfadc7..542d00d0d78 100644
--- a/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/PersistentIdentifierServiceBeanTest.java
@@ -8,32 +8,44 @@
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import java.util.List;
-import java.util.Map;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
  * @author michael
  */
+@ExtendWith(MockitoExtension.class)
 public class PersistentIdentifierServiceBeanTest {
     
-    
+    @Mock
+    private SettingsServiceBean settingsServiceBean;
+
+    @InjectMocks
     DOIEZIdServiceBean ezidServiceBean = new DOIEZIdServiceBean();
+    @InjectMocks
     DOIDataCiteServiceBean dataCiteServiceBean = new DOIDataCiteServiceBean();
+    @InjectMocks
     FakePidProviderServiceBean fakePidProviderServiceBean = new FakePidProviderServiceBean();
     HandlenetServiceBean hdlServiceBean = new HandlenetServiceBean();
+    PermaLinkPidProviderServiceBean permaLinkServiceBean = new PermaLinkPidProviderServiceBean(); 
     
     CommandContext ctxt;
     
-    @Before
+    @BeforeEach
     public void setup() {
+        MockitoAnnotations.initMocks(this);
         ctxt = new TestCommandContext(){
             @Override
             public HandlenetServiceBean handleNet() {
@@ -55,6 +67,11 @@ public FakePidProviderServiceBean fakePidProvider() {
                 return fakePidProviderServiceBean;
             }
             
+            @Override
+            public PermaLinkPidProviderServiceBean permaLinkProvider() {
+                return permaLinkServiceBean;
+            }
+            
         };
     }
     
@@ -64,19 +81,28 @@ public FakePidProviderServiceBean fakePidProvider() {
     @Test
     public void testGetBean_String_CommandContext_OK() {
         ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "EZID");
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("EZID");
+        
         assertEquals(ezidServiceBean, 
                      GlobalIdServiceBean.getBean("doi", ctxt));
         
         ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "DataCite");
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("DataCite");
+
         assertEquals(dataCiteServiceBean, 
                      GlobalIdServiceBean.getBean("doi", ctxt));
 
         ctxt.settings().setValueForKey(SettingsServiceBean.Key.DoiProvider, "FAKE");
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("FAKE");
+
         assertEquals(fakePidProviderServiceBean,
                 GlobalIdServiceBean.getBean("doi", ctxt));
 
         assertEquals(hdlServiceBean, 
                      GlobalIdServiceBean.getBean("hdl", ctxt));
+        
+        assertEquals(permaLinkServiceBean, 
+                GlobalIdServiceBean.getBean("perma", ctxt));
     }
     
      @Test
@@ -95,6 +121,7 @@ public void testGetBean_String_CommandContext_BAD() {
     public void testGetBean_CommandContext() {
         ctxt.settings().setValueForKey( SettingsServiceBean.Key.Protocol, "doi");
         ctxt.settings().setValueForKey( SettingsServiceBean.Key.DoiProvider, "EZID");
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DoiProvider, "")).thenReturn("EZID");
         
         assertEquals(ezidServiceBean, 
                      GlobalIdServiceBean.getBean("doi", ctxt));
@@ -102,6 +129,10 @@ public void testGetBean_CommandContext() {
         ctxt.settings().setValueForKey( SettingsServiceBean.Key.Protocol, "hdl");
         assertEquals(hdlServiceBean, 
                      GlobalIdServiceBean.getBean("hdl", ctxt));
+        
+        ctxt.settings().setValueForKey( SettingsServiceBean.Key.Protocol, "perma");
+        assertEquals(permaLinkServiceBean, 
+                     GlobalIdServiceBean.getBean("perma", ctxt));
     }
 
    
diff --git a/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java b/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java
index e4b33b83930..f8138537cd7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/RoleAssignmentTest.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.RoleAssignee;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 
-import org.junit.After;
-import org.junit.Before;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 
 public class RoleAssignmentTest {
 
@@ -22,7 +22,7 @@ public class RoleAssignmentTest {
     private Dataset dataset;
     private String privateUrlToken;
 
-    @Before
+    @BeforeEach
     public void before() {
         this.dataverseRole = new DataverseRole();
         this.roleAssignee = GuestUser.get();
@@ -30,7 +30,7 @@ public void before() {
         this.privateUrlToken = "some-token";
     }
 
-    @After
+    @AfterEach
     public void after() {
         this.dataverseRole = null;
         this.roleAssignee = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java b/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java
index 1726ea70114..6a965b17a16 100644
--- a/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/actionlogging/ActionLogRecordTest.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.actionlogging;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.Date;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord.ActionType;
 import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord.Result;
@@ -16,7 +16,7 @@ public class ActionLogRecordTest {
 
     private ActionLogRecord referenceRecord;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.referenceRecord = new ActionLogRecord(ActionType.Admin, "subType1");
         this.referenceRecord.setEndTime(new Date());
@@ -25,7 +25,7 @@ public void setUp() {
         this.referenceRecord.setInfo("info1");
     }
 
-    @After
+    @AfterEach
     public void tearDwon() {
         this.referenceRecord = null;
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java
index 3e088c184ad..c67dfeeadfa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AbstractApiBeanTest.java
@@ -1,24 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
-import edu.harvard.iq.dataverse.util.MockResponse;
 import java.io.StringReader;
 import java.io.StringWriter;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonWriter;
-import javax.json.JsonWriterFactory;
-import javax.json.stream.JsonGenerator;
-import javax.ws.rs.core.Response;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonWriter;
+import jakarta.json.JsonWriterFactory;
+import jakarta.json.stream.JsonGenerator;
+import jakarta.ws.rs.core.Response;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class AbstractApiBeanTest {
 
@@ -26,7 +24,7 @@ public class AbstractApiBeanTest {
 
     AbstractApiBeanImpl sut;
 
-    @Before
+    @BeforeEach
     public void before() {
         sut = new AbstractApiBeanImpl();
     }
@@ -40,15 +38,15 @@ public void testParseBooleanOrDie_ok() throws Exception {
         assertFalse(sut.parseBooleanOrDie("0"));
         assertFalse(sut.parseBooleanOrDie("no"));
     }
-
-    @Test(expected = Exception.class)
-    public void testParseBooleanOrDie_invalid() throws Exception {
-        sut.parseBooleanOrDie("I'm not a boolean value!");
+    
+    @Test
+    void testParseBooleanOrDie_invalid() {
+        assertThrows(Exception.class, () -> sut.parseBooleanOrDie("I'm not a boolean value!"));
     }
 
     @Test
-    public void testFailIfNull_ok() throws Exception {
-        sut.failIfNull(sut, "");
+    void testFailIfNull_ok() {
+        assertDoesNotThrow(() -> sut.failIfNull(sut, ""));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
index d6aac80b435..d08f916243f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
@@ -5,32 +5,30 @@
  */
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.DataFile;
-import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.io.IOException;
 import java.util.zip.ZipInputStream;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+
+import jakarta.json.Json;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import java.util.zip.ZipEntry;
 import java.io.ByteArrayOutputStream;
-import java.io.File;
 import java.io.InputStream;
-import java.nio.file.Path;
 import java.util.HashMap;
-import static javax.ws.rs.core.Response.Status.OK;
+
 import org.hamcrest.collection.IsMapContaining;
-import static junit.framework.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
+
+import static jakarta.ws.rs.core.Response.Status.*;
+import static org.hamcrest.MatcherAssert.*;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -82,7 +80,7 @@ public class AccessIT {
     private static String testFileFromZipUploadWithFoldersChecksum3 = "00433ccb20111f9d40f0e5ab6fa8396f";
 
     
-    @BeforeClass
+    @BeforeAll
     public static void setUp() throws InterruptedException {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         
@@ -124,7 +122,7 @@ public static void setUp() throws InterruptedException {
         tabFile2NameConvert = tabFile2Name.substring(0, tabFile2Name.indexOf(".dta")) + ".tab";
         String tab2PathToFile = "scripts/search/data/tabular/" + tabFile2Name;
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile2Name, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile2Name);
 
         Response tab2AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab2PathToFile, apiToken);
         tabFile2Id = JsonPath.from(tab2AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
@@ -133,13 +131,13 @@ public static void setUp() throws InterruptedException {
         tabFile3NameRestrictedConvert = tabFile3NameRestricted.substring(0, tabFile3NameRestricted.indexOf(".dta")) + ".tab";
         String tab3PathToFile = "scripts/search/data/tabular/" + tabFile3NameRestricted;
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));     
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted);
         
         Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab3PathToFile, apiToken);
 
         tabFile3IdRestricted = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestricted);
         
         Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestricted.toString(), true, apiToken);
         restrictResponse.prettyPrint();
@@ -158,11 +156,11 @@ public static void setUp() throws InterruptedException {
         String tab4PathToFile = "scripts/search/data/tabular/" + tabFile4NameUnpublished;
         Response tab4AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab4PathToFile, apiToken);
         tabFile4IdUnpublished = JsonPath.from(tab4AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile2Name, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile2Name);
                         
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDown() {   
 
         Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
@@ -200,6 +198,8 @@ public void testDownloadSingleFile() {
         //Not logged in non-restricted
         Response anonDownloadOriginal = UtilIT.downloadFileOriginal(tabFile1Id);
         Response anonDownloadConverted = UtilIT.downloadFile(tabFile1Id);
+        Response anonDownloadConvertedNullKey = UtilIT.downloadFile(tabFile1Id, null);
+
         // ... and download the same tabular data file, but without the variable name header added:
         Response anonDownloadTabularNoHeader = UtilIT.downloadTabularFileNoVarHeader(tabFile1Id);
         // ... and download the same tabular file, this time requesting the "format=tab" explicitly:
@@ -208,6 +208,8 @@ public void testDownloadSingleFile() {
         assertEquals(OK.getStatusCode(), anonDownloadConverted.getStatusCode());
         assertEquals(OK.getStatusCode(), anonDownloadTabularNoHeader.getStatusCode());
         assertEquals(OK.getStatusCode(), anonDownloadTabularWithFormatName.getStatusCode());
+        assertEquals(UNAUTHORIZED.getStatusCode(), anonDownloadConvertedNullKey.getStatusCode());
+        
         int origSizeAnon = anonDownloadOriginal.getBody().asByteArray().length;
         int convertSizeAnon = anonDownloadConverted.getBody().asByteArray().length;
         int tabularSizeNoVarHeader = anonDownloadTabularNoHeader.getBody().asByteArray().length;
@@ -425,10 +427,7 @@ private HashMap<String,ByteArrayOutputStream> readZipResponse(InputStream iStrea
                 }
 
                 String name = entry.getName(); 
-//                String s = String.format("Entry: %s len %d added %TD",
-//                                entry.getName(), entry.getSize(),
-//                                new Date(entry.getTime()));
-//                System.out.println(s);
+
 
                 // Once we get the entry from the zStream, the zStream is
                 // positioned read to read the raw data, and we keep
@@ -468,7 +467,7 @@ private HashMap<String,ByteArrayOutputStream> readZipResponse(InputStream iStrea
     
     @Test
     public void testRequestAccess() throws InterruptedException {
-
+    
         String pathToJsonFile = "scripts/api/data/dataset-create-new.json";
         Response createDatasetResponse = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken);
         createDatasetResponse.prettyPrint();
@@ -484,7 +483,7 @@ public void testRequestAccess() throws InterruptedException {
         Response tab3AddResponse = UtilIT.uploadFileViaNative(datasetIdNew.toString(), tab3PathToFile, apiToken);
         Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tab3PathToFile , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tab3PathToFile);
         
         Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, apiToken);
         restrictResponse.prettyPrint();
@@ -548,7 +547,7 @@ public void testRequestAccess() throws InterruptedException {
         assertEquals(200, revokeFileAccessResponse.getStatusCode());
 
         listAccessRequestResponse = UtilIT.getAccessRequestList(tabFile3IdRestrictedNew.toString(), apiToken);
-        assertEquals(400, listAccessRequestResponse.getStatusCode());
+        assertEquals(404, listAccessRequestResponse.getStatusCode());
     }
 
     // This is a round trip test of uploading a zipped archive, with some folder
@@ -629,7 +628,50 @@ public void testZipUploadAndDownload() throws IOException {
         System.out.println("MD5 checksums of the unzipped file streams are correct.");
         
         System.out.println("Zip upload-and-download round trip test: success!");
-        
     }
 
+    @Test
+    public void testGetUserFileAccessRequested() {
+        // Create new user
+        Response createUserResponse = UtilIT.createRandomUser();
+        createUserResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+
+        String dataFileId = Integer.toString(tabFile3IdRestricted);
+
+        // Call with new user and unrequested access file
+        Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken);
+        getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        boolean userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data");
+        assertFalse(userFileAccessRequested);
+
+        // Request file access for the new user
+        Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken);
+        requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Call with new user and requested access file
+        getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken);
+        getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data");
+        assertTrue(userFileAccessRequested);
+    }
+
+    @Test
+    public void testGetUserPermissionsOnFile() {
+        // Call with valid file id
+        Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken);
+        getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+        boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile");
+        assertTrue(canDownloadFile);
+        boolean canEditOwnerDataset = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canEditOwnerDataset");
+        assertTrue(canEditOwnerDataset);
+        boolean canManageFilePermissions = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canManageFilePermissions");
+        assertTrue(canManageFilePermissions);
+
+        // Call with invalid file id
+        Response getUserPermissionsOnFileInvalidIdResponse = UtilIT.getUserPermissionsOnFile("testInvalidId", apiToken);
+        getUserPermissionsOnFileInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
index bcee8d18e17..b9bf09cfe68 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AdminIT.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GitHubOAuth2AP;
@@ -15,46 +15,44 @@
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import org.junit.Test;
-import org.junit.BeforeClass;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
 
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Logger;
 
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.assertTrue;
-import org.junit.Ignore;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class AdminIT {
 
     private static final Logger logger = Logger.getLogger(AdminIT.class.getCanonicalName());
 
-    @BeforeClass
+    private final String testNonSuperuserApiToken = createTestNonSuperuserApiToken();
+
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
     @Test
     public void testListAuthenticatedUsers() throws Exception {
-        Response anon = UtilIT.listAuthenticatedUsers("");
+        Response anon = UtilIT.listAuthenticatedUsers(testNonSuperuserApiToken);
         anon.prettyPrint();
         anon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
-        Response createNonSuperuser = UtilIT.createRandomUser();
-        
-        String nonSuperuserUsername = UtilIT.getUsernameFromResponse(createNonSuperuser);
-        String nonSuperuserApiToken = UtilIT.getApiTokenFromResponse(createNonSuperuser);
-
-        Response nonSuperuser = UtilIT.listAuthenticatedUsers(nonSuperuserApiToken);
+        Response nonSuperuser = UtilIT.listAuthenticatedUsers(testNonSuperuserApiToken);
         nonSuperuser.prettyPrint();
         nonSuperuser.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
         
@@ -69,6 +67,9 @@ public void testListAuthenticatedUsers() throws Exception {
         superuser.prettyPrint();
         superuser.then().assertThat().statusCode(OK.getStatusCode());
 
+        Response createNonSuperuser = UtilIT.createRandomUser();
+        String nonSuperuserUsername = UtilIT.getUsernameFromResponse(createNonSuperuser);
+
         Response deleteNonSuperuser = UtilIT.deleteUser(nonSuperuserUsername);
         assertEquals(200, deleteNonSuperuser.getStatusCode());
 
@@ -84,7 +85,7 @@ public void testFilterAuthenticatedUsersForbidden() throws Exception {
         // --------------------------------------------
         Response anon = UtilIT.filterAuthenticatedUsers("", null, null, null, null);
         anon.prettyPrint();
-        anon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
+        anon.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode());
 
         // --------------------------------------------
         // Forbidden: Try with a regular user--*not a superuser*
@@ -152,10 +153,10 @@ public void testFilterAuthenticatedUsers() throws Exception {
                 .body("data.pagination.pageCount", equalTo(1))
                 .body("data.pagination.numResults", equalTo(numResults));
         
-        String userIdentifer;
+        String userIdentifier;
         for (int i=0; i < numResults; i++){
-            userIdentifer = JsonPath.from(filterReponse01.getBody().asString()).getString("data.users[" + i + "].userIdentifier");
-            assertEquals(randomUsernames.contains(userIdentifer), true);
+            userIdentifier = JsonPath.from(filterReponse01.getBody().asString()).getString("data.users[" + i + "].userIdentifier");
+            assertTrue(randomUsernames.contains(userIdentifier));
         }
 
         List<Object> userList1 = JsonPath.from(filterReponse01.body().asString()).getList("data.users");
@@ -176,10 +177,10 @@ public void testFilterAuthenticatedUsers() throws Exception {
                 .body("data.pagination.pageCount", equalTo(3))
                 .body("data.pagination.numResults", equalTo(numResults));
         
-        String userIdentifer2;
+        String userIdentifier2;
         for (int i=0; i < numUsersReturned; i++){
-            userIdentifer2 = JsonPath.from(filterReponse02.getBody().asString()).getString("data.users[" + i + "].userIdentifier");
-            assertEquals(randomUsernames.contains(userIdentifer2), true);
+            userIdentifier2 = JsonPath.from(filterReponse02.getBody().asString()).getString("data.users[" + i + "].userIdentifier");
+            assertTrue(randomUsernames.contains(userIdentifier2));
         }
         
         List<Object> userList2 = JsonPath.from(filterReponse02.body().asString()).getList("data.users");
@@ -285,7 +286,7 @@ public void testConvertShibUserToBuiltin() throws Exception {
         String newEmailAddressToUse = "builtin2shib." + UUID.randomUUID().toString().substring(0, 8) + "@mailinator.com";
         String data = emailOfUserToConvert + ":" + password + ":" + newEmailAddressToUse;
 
-        Response builtinToShibAnon = UtilIT.migrateBuiltinToShib(data, "");
+        Response builtinToShibAnon = UtilIT.migrateBuiltinToShib(data, testNonSuperuserApiToken);
         builtinToShibAnon.prettyPrint();
         builtinToShibAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
@@ -315,11 +316,11 @@ public void testConvertShibUserToBuiltin() throws Exception {
          * the Shib user has an invalid email address:
          * https://github.com/IQSS/dataverse/issues/2998
          */
-        Response shibToBuiltinAnon = UtilIT.migrateShibToBuiltin(Long.MAX_VALUE, "", "");
+        Response shibToBuiltinAnon = UtilIT.migrateShibToBuiltin(Long.MAX_VALUE, "", testNonSuperuserApiToken);
         shibToBuiltinAnon.prettyPrint();
         shibToBuiltinAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
-        Response nonSuperuser = UtilIT.migrateShibToBuiltin(Long.MAX_VALUE, "", "");
+        Response nonSuperuser = UtilIT.migrateShibToBuiltin(Long.MAX_VALUE, "", testNonSuperuserApiToken);
         nonSuperuser.prettyPrint();
         nonSuperuser.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
@@ -379,7 +380,7 @@ public void testConvertDeactivateUserToShib() {
         String newEmailAddressToUse = "builtin2shib." + UUID.randomUUID().toString().substring(0, 8) + "@mailinator.com";
         String data = emailOfUserToConvert + ":" + password + ":" + newEmailAddressToUse;
 
-        Response builtinToShibAnon = UtilIT.migrateBuiltinToShib(data, "");
+        Response builtinToShibAnon = UtilIT.migrateBuiltinToShib(data, testNonSuperuserApiToken);
         builtinToShibAnon.prettyPrint();
         builtinToShibAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
@@ -434,7 +435,7 @@ public void testConvertOAuthUserToBuiltin() throws Exception {
         String data = emailOfUserToConvert + ":" + password + ":" + newEmailAddressToUse + ":" + providerIdToConvertTo + ":" + newPersistentUserIdInLookupTable;
 
         System.out.println("data: " + data);
-        Response builtinToOAuthAnon = UtilIT.migrateBuiltinToOAuth(data, "");
+        Response builtinToOAuthAnon = UtilIT.migrateBuiltinToOAuth(data, testNonSuperuserApiToken);
         builtinToOAuthAnon.prettyPrint();
         builtinToOAuthAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
@@ -465,11 +466,7 @@ public void testConvertOAuthUserToBuiltin() throws Exception {
          * the OAuth user has an invalid email address:
          * https://github.com/IQSS/dataverse/issues/2998
          */
-        Response oauthToBuiltinAnon = UtilIT.migrateOAuthToBuiltin(Long.MAX_VALUE, "", "");
-        oauthToBuiltinAnon.prettyPrint();
-        oauthToBuiltinAnon.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
-
-        Response nonSuperuser = UtilIT.migrateOAuthToBuiltin(Long.MAX_VALUE, "", "");
+        Response nonSuperuser = UtilIT.migrateOAuthToBuiltin(Long.MAX_VALUE, "", testNonSuperuserApiToken);
         nonSuperuser.prettyPrint();
         nonSuperuser.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
 
@@ -608,7 +605,7 @@ public void testRecalculateDataFileHash() {
         String superuserUsername = UtilIT.getUsernameFromResponse(createSuperuser);
         UtilIT.makeSuperUser(superuserUsername);
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + origFileId, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + origFileId);
 
         //Bad file id         
         Response computeDataFileHashResponse = UtilIT.computeDataFileHashValue("BadFileId", DataFile.ChecksumType.MD5.toString(), superuserApiToken);
@@ -673,7 +670,7 @@ public void testRecalculateDataFileHash() {
     }
     
     @Test
-    @Ignore
+    @Disabled
     public void testMigrateHDLToDOI() {
         /*
         This test is set to ignore because it requires a setup that will
@@ -737,6 +734,13 @@ public void testMigrateHDLToDOI() {
                 .statusCode(OK.getStatusCode());
     }
 
+    /**
+     * Disabled because once there are new fields in the database that Solr
+     * doesn't know about, dataset creation could be prevented, or at least
+     * subsequent search operations could fail because the dataset can't be
+     * indexed.
+     */
+    @Disabled
     @Test
     public void testLoadMetadataBlock_NoErrorPath() {
         Response createUser = UtilIT.createRandomUser();
@@ -781,6 +785,13 @@ public void testLoadMetadataBlock_NoErrorPath() {
         assertEquals(244, (int) statistics.get("Controlled Vocabulary"));
     }
 
+    /**
+     * Disabled because once there are new fields in the database that Solr
+     * doesn't know about, dataset creation could be prevented, or at least
+     * subsequent search operations could fail because the dataset can't be
+     * indexed.
+     */
+    @Disabled
     @Test
     public void testLoadMetadataBlock_ErrorHandling() {
         Response createUser = UtilIT.createRandomUser();
@@ -807,6 +818,16 @@ public void testLoadMetadataBlock_ErrorHandling() {
           message
         );
     }
+
+    @Test
+    public void testClearThumbnailFailureFlag(){
+        Response nonExistentFile = UtilIT.clearThumbnailFailureFlag(Long.MAX_VALUE);
+        nonExistentFile.prettyPrint();
+        nonExistentFile.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+        
+        Response clearAllFlags = UtilIT.clearThumbnailFailureFlags();
+        clearAllFlags.prettyPrint();
+        clearAllFlags.then().assertThat().statusCode(OK.getStatusCode());
+    }
     
     @Test
     public void testBannerMessages(){
@@ -842,4 +863,36 @@ public void testBannerMessages(){
         assertEquals("OK", status);
         
     }
+
+    /**
+     * For a successful download from /tmp, see BagIT. Here we are doing error
+     * checking.
+     */
+    @Test
+    public void testDownloadTmpFile() throws IOException {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response tryToDownloadAsNonSuperuser = UtilIT.downloadTmpFile("/tmp/foo", apiToken);
+        tryToDownloadAsNonSuperuser.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
+
+        Response toggleSuperuser = UtilIT.makeSuperUser(username);
+        toggleSuperuser.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        Response tryToDownloadEtcPasswd = UtilIT.downloadTmpFile("/etc/passwd", apiToken);
+        tryToDownloadEtcPasswd.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("status", equalTo("ERROR"))
+                .body("message", equalTo("Path must begin with '/tmp' but after normalization was '/etc/passwd'."));
+    }
+
+    private String createTestNonSuperuserApiToken() {
+        Response createUserResponse = UtilIT.createRandomUser();
+        createUserResponse.then().assertThat().statusCode(OK.getStatusCode());
+        return UtilIT.getApiTokenFromResponse(createUserResponse);
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java
index 0e404f6ba97..754350e93db 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AuxiliaryFilesIT.java
@@ -1,26 +1,26 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import static javax.ws.rs.core.Response.Status.CONFLICT;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CONFLICT;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class AuxiliaryFilesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -62,7 +62,7 @@ public void testUploadAuxFiles() throws IOException {
 
         Long fileId = JsonPath.from(uploadFile.body().asString()).getLong("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToDataFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToDataFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId.toString(), true, apiToken);
         restrictFile.prettyPrint();
@@ -243,29 +243,29 @@ public void testUploadAuxFiles() throws IOException {
         // Download JSON aux file.
         Response downloadAuxFileJson = UtilIT.downloadAuxFile(fileId, formatTagJson, formatVersionJson, apiToken);
         downloadAuxFileJson.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.dpJson_0.1.json\"", downloadAuxFileJson.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.dpJson_0.1.json\"", downloadAuxFileJson.header("Content-disposition"));
 
         // Download XML aux file.
         Response downloadAuxFileXml = UtilIT.downloadAuxFile(fileId, formatTagXml, formatVersionXml, apiToken);
         downloadAuxFileXml.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.dpXml_0.1.xml\"", downloadAuxFileXml.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.dpXml_0.1.xml\"", downloadAuxFileXml.header("Content-disposition"));
 
         // Download PDF aux file.
         Response downloadAuxFilePdf = UtilIT.downloadAuxFile(fileId, formatTagPdf, formatVersionPdf, apiToken);
         downloadAuxFilePdf.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.dpPdf_0.1.pdf\"", downloadAuxFilePdf.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.dpPdf_0.1.pdf\"", downloadAuxFilePdf.header("Content-disposition"));
 
         // Download Markdown aux file.
         Response downloadAuxFileMd = UtilIT.downloadAuxFile(fileId, formatTagMd, formatVersionMd, apiToken);
         downloadAuxFileMd.then().assertThat().statusCode(OK.getStatusCode());
         // No file extension here because Tika's getDefaultMimeTypes doesn't include "text/markdown".
         // Note: browsers seem to add ".bin" ("myfile.bin") rather than no extension ("myfile").
-        Assert.assertEquals("attachment; filename=\"data.tab.README_0.1\"", downloadAuxFileMd.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.README_0.1\"", downloadAuxFileMd.header("Content-disposition"));
 
         // Download Markdown aux file with no MIME type given
         Response downloadAuxFileNoMime1 = UtilIT.downloadAuxFile(fileId, formatTagNoMimeType1, formatVersionNoMimeType1, apiToken);
         downloadAuxFileNoMime1.then().assertThat().statusCode(OK.getStatusCode());
-        Assert.assertEquals("attachment; filename=\"data.tab.noMimeType1_0.1.txt\"", downloadAuxFileNoMime1.header("Content-disposition"));
+        assertEquals("attachment; filename=\"data.tab.noMimeType1_0.1.txt\"", downloadAuxFileNoMime1.header("Content-disposition"));
 
         Response createUserNoPrivs = UtilIT.createRandomUser();
         createUserNoPrivs.then().assertThat().statusCode(OK.getStatusCode());
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java
index 4ac76ac846d..28f7fa28328 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/BagIT.java
@@ -1,18 +1,33 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
 import edu.harvard.iq.dataverse.engine.command.impl.LocalSubmitToArchiveCommand;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.response.Response;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.util.Enumeration;
+import java.util.Scanner;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class BagIT {
 
-    @BeforeClass
+    static String bagitExportDir = "/tmp";
+
+    @BeforeAll
     public static void setUpClass() {
 
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
@@ -25,14 +40,14 @@ public static void setUpClass() {
         setArchiverSettings.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
-        Response setBagItLocalPath = UtilIT.setSetting(":BagItLocalPath", "/tmp");
+        Response setBagItLocalPath = UtilIT.setSetting(":BagItLocalPath", bagitExportDir);
         setBagItLocalPath.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
     }
 
     @Test
-    public void testBagItExport() {
+    public void testBagItExport() throws IOException {
 
         Response createUser = UtilIT.createRandomUser();
         createUser.then().assertThat().statusCode(OK.getStatusCode());
@@ -63,9 +78,81 @@ public void testBagItExport() {
         archiveDataset.prettyPrint();
         archiveDataset.then().assertThat().statusCode(OK.getStatusCode());
 
+        // spaceName comes from LocalSubmitToArchiveCommand
+        String spaceName = datasetPid.replace(':', '-').replace('/', '-')
+                .replace('.', '-').toLowerCase();
+        // spacename: doi-10-5072-fk2-fosg5q
+
+        String pathToZip = bagitExportDir + "/" + spaceName + "v1.0" + ".zip";
+
+        try {
+            // give the bag time to generate
+            Thread.sleep(3000);
+        } catch (InterruptedException ex) {
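+            // If the sleep is interrupted, just proceed; the download below will fail if the bag isn't ready yet.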
+        }
+
+        // A bag could look like this:
+        //doi-10-5072-FK2-DKUTDUv-1-0/data/
+        //doi-10-5072-FK2-DKUTDUv-1-0/data/Darwin's Finches/
+        //doi-10-5072-FK2-DKUTDUv-1-0/metadata/
+        //doi-10-5072-FK2-DKUTDUv-1-0/metadata/pid-mapping.txt
+        //doi-10-5072-FK2-DKUTDUv-1-0/manifest-md5.txt
+        //doi-10-5072-FK2-DKUTDUv-1-0/bagit.txt
+        //doi-10-5072-FK2-DKUTDUv-1-0/metadata/oai-ore.jsonld
+        //doi-10-5072-FK2-DKUTDUv-1-0/metadata/datacite.xml
+        //doi-10-5072-FK2-DKUTDUv-1-0/bag-info.txt
+        // ---
+        // bag-info.txt could look like this:
+        //Contact-Name: Finch, Fiona
+        //Contact-Email: finch@mailinator.com
+        //Source-Organization: Dataverse Installation (<Site Url>)
+        //Organization-Address: <Full address>
+        //Organization-Email: <Email address>
+        //External-Description: Darwin's finches (also known as the Galápagos finches) are a group of about
+        // fifteen species of passerine birds.
+        //Bagging-Date: 2023-11-14
+        //External-Identifier: https://doi.org/10.5072/FK2/LZIGBC
+        //Bag-Size: 0 bytes
+        //Payload-Oxum: 0.0
+        //Internal-Sender-Identifier: Root:Darwin's Finches
+        Response downloadBag = UtilIT.downloadTmpFile(pathToZip, apiToken);
+        downloadBag.then().assertThat().statusCode(OK.getStatusCode());
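+        // Save the downloaded bag to disk so it can be opened as a ZipFile below.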
+        Path outputPath = Paths.get("/tmp/foo.zip");
+        java.nio.file.Files.copy(downloadBag.getBody().asInputStream(), outputPath, StandardCopyOption.REPLACE_EXISTING);
+
+        ZipFile zipFile = new ZipFile(outputPath.toString());
+        Enumeration<? extends ZipEntry> entries = zipFile.entries();
+        String sourceOrg = null;
+        String orgAddress = null;
+        String orgEmail = null;
+        while (entries.hasMoreElements()) {
+            ZipEntry entry = entries.nextElement();
+            String name = entry.getName();
+            System.out.println("name: " + name);
+            if (name.endsWith("bag-info.txt")) {
+                InputStream stream = zipFile.getInputStream(entry);
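+                // Read the whole bag-info.txt entry as one string (the "\\A" delimiter matches only the beginning of input).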
+                Scanner s = new Scanner(stream).useDelimiter("\\A");
+                String result = s.hasNext() ? s.next() : "";
+                System.out.println("result: " + result);
+                String[] lines = result.split("\n");
+                for (String line : lines) {
+                    if (line.startsWith("Source-Organization")) {
+                        sourceOrg = line;
+                    } else if (line.startsWith("Organization-Address")) {
+                        orgAddress = line;
+                    } else if (line.startsWith("Organization-Email")) {
+                        orgEmail = line;
+                    }
+                }
+            }
+        }
+        assertEquals("Source-Organization: Dataverse Installation (<Site Url>)", sourceOrg.trim());
+        assertEquals("Organization-Address: <Full address>", orgAddress.trim());
+        assertEquals("Organization-Email: <Email address>", orgEmail.trim());
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
 
         // Not checking if delete happened. Hopefully, it did.
@@ -75,4 +162,4 @@ public static void tearDownClass() {
 
     }
 
-}
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java
index 89ad79817d8..c72fe19e494 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/BatchImportIT.java
@@ -1,15 +1,15 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
 import java.io.File;
-import com.jayway.restassured.response.Response;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static javax.ws.rs.core.Response.Status.ACCEPTED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.CREATED;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static jakarta.ws.rs.core.Response.Status.ACCEPTED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
 import org.hamcrest.CoreMatchers;
 
 public class BatchImportIT {
@@ -21,7 +21,7 @@ public class BatchImportIT {
     public BatchImportIT() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java
index 0f8385409a3..af938cbebe1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/BuiltinUsersIT.java
@@ -1,10 +1,11 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.AbstractMap;
 import java.util.Arrays;
@@ -14,18 +15,17 @@
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.startsWith;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class BuiltinUsersIT {
 
@@ -36,7 +36,7 @@ public class BuiltinUsersIT {
     private static final String usernameKey = "userName";
     private static final String emailKey = "email";
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
 
@@ -93,8 +93,8 @@ public void testFindByToken() {
         getUserAsJsonByToken = UtilIT.getAuthenticatedUserByToken("badcode");
         getUserAsJsonByToken.then().assertThat()
                 .body("status", equalTo("ERROR"))
-                .body("message", equalTo("User with token badcode not found."))
-                .statusCode(BAD_REQUEST.getStatusCode());
+                .body("message", equalTo(ApiKeyAuthMechanism.RESPONSE_MESSAGE_BAD_API_KEY))
+                .statusCode(UNAUTHORIZED.getStatusCode());
 
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java
index e00dba2263b..0fef3d7166e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ConfirmEmailIT.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static junit.framework.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.Matchers.startsWith;
 
@@ -18,7 +18,7 @@ public class ConfirmEmailIT {
 
     private static final Logger logger = Logger.getLogger(ConfirmEmailIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java
index 86c3eed4297..bb4c64dedcf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataCiteIT.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static junit.framework.Assert.assertEquals;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 /**
  * These tests will only work if you are using "DataCite" rather than "EZID" for
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
new file mode 100644
index 00000000000..facb3f7c784
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java
@@ -0,0 +1,48 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class DataRetrieverApiIT {
+
+    @BeforeAll
+    public static void setUpClass() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testRetrieveMyDataAsJsonString() {
+        // Call with bad API token
+        ArrayList<Long> emptyRoleIdsList = new ArrayList<>();
+        Response badApiTokenResponse = UtilIT.retrieveMyDataAsJsonString("bad-token", "dummy-user-identifier", emptyRoleIdsList);
+        badApiTokenResponse.then().assertThat().body("status", equalTo(ApiConstants.STATUS_ERROR)).body("message", equalTo(ApiKeyAuthMechanism.RESPONSE_MESSAGE_BAD_API_KEY)).statusCode(UNAUTHORIZED.getStatusCode());
+
+        // Call as superuser with invalid user identifier
+        Response createUserResponse = UtilIT.createRandomUser();
+        Response makeSuperUserResponse = UtilIT.makeSuperUser(UtilIT.getUsernameFromResponse(createUserResponse));
+        assertEquals(OK.getStatusCode(), makeSuperUserResponse.getStatusCode());
+        String superUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+
+        String badUserIdentifier = "bad-identifier";
+        Response invalidUserIdentifierResponse = UtilIT.retrieveMyDataAsJsonString(superUserApiToken, badUserIdentifier, emptyRoleIdsList);
+        assertEquals("{\"success\":false,\"error_message\":\"No user found for: \\\"" + badUserIdentifier + "\\\"\"}", invalidUserIdentifierResponse.prettyPrint());
+        assertEquals(OK.getStatusCode(), invalidUserIdentifierResponse.getStatusCode());
+
+        // Call as superuser with valid user identifier
+        Response createSecondUserResponse = UtilIT.createRandomUser();
+        String userIdentifier = UtilIT.getUsernameFromResponse(createSecondUserResponse);
+        Response validUserIdentifierResponse = UtilIT.retrieveMyDataAsJsonString(superUserApiToken, userIdentifier, emptyRoleIdsList);
+        assertEquals("{\"success\":false,\"error_message\":\"Sorry, you have no assigned roles.\"}", validUserIdentifierResponse.prettyPrint());
+        assertEquals(OK.getStatusCode(), validUserIdentifierResponse.getStatusCode());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
index 559e5a7dfba..ca99960f240 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetFieldServiceApiTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.api;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class DatasetFieldServiceApiTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 3c4706f10c2..928574eb82b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -1,42 +1,63 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
+import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean;
+import edu.harvard.iq.dataverse.FileSearchCriteria;
+import io.restassured.RestAssured;
+
+import static edu.harvard.iq.dataverse.DatasetVersion.ARCHIVE_NOTE_MAX_LENGTH;
+import static edu.harvard.iq.dataverse.api.ApiConstants.*;
+import static io.restassured.RestAssured.given;
+
+import io.restassured.path.json.JsonPath;
+import io.restassured.http.ContentType;
+import io.restassured.response.Response;
+
+import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
 import java.util.logging.Logger;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.mockito.Mockito;
+
+import org.apache.commons.lang3.RandomStringUtils;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.skyscreamer.jsonassert.JSONAssert;
-import org.junit.Ignore;
-import com.jayway.restassured.path.json.JsonPath;
-
-import java.util.List;
-import java.util.Map;
-import javax.json.JsonObject;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
+import org.junit.jupiter.api.Disabled;
+
+import jakarta.json.JsonObject;
+
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
+import static jakarta.ws.rs.core.Response.Status.CONFLICT;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
+
 import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataverseServiceBean;
 
 import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER;
+
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
+import edu.harvard.iq.dataverse.dataaccess.GlobusOverlayAccessIOTest;
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import java.util.UUID;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 
-import com.jayway.restassured.parsing.Parser;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.path.xml.XmlPath;
+import io.restassured.parsing.Parser;
+
+import static io.restassured.path.json.JsonPath.with;
+
+import io.restassured.path.xml.XmlPath;
+
 import static edu.harvard.iq.dataverse.api.UtilIT.equalToCI;
+
 import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers;
 import edu.harvard.iq.dataverse.datavariable.VarGroup;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
@@ -44,6 +65,7 @@
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.json.JSONLDUtil;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
 import java.io.File;
 import java.io.IOException;
@@ -52,31 +74,31 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.HashMap;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.core.Response.Status;
-import static javax.ws.rs.core.Response.Status.CONFLICT;
-
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
-import static javax.ws.rs.core.Response.Status.OK;
+
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.core.Response.Status;
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.XMLStreamReader;
-import static org.junit.Assert.assertEquals;
+
+import static java.lang.Thread.sleep;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import org.hamcrest.CoreMatchers;
+
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.hasItems;
 import static org.hamcrest.CoreMatchers.startsWith;
 import static org.hamcrest.CoreMatchers.nullValue;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.hamcrest.Matchers.contains;
+
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 
 public class DatasetsIT {
@@ -85,7 +107,7 @@ public class DatasetsIT {
     
     
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         
         
@@ -98,6 +120,13 @@ public static void setUpClass() {
         Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport);
         removeExcludeEmail.then().assertThat()
                 .statusCode(200);
+
+        Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
+        removeAnonymizedFieldTypeNames.then().assertThat()
+                .statusCode(200);
+
+        UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+
         /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work
                
         Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl);
@@ -109,8 +138,9 @@ public static void setUpClass() {
                 .statusCode(200);
          */
     }
+    
 
-    @AfterClass
+    @AfterAll
     public static void afterClass() {
 
         Response removeIdentifierGenerationStyle = UtilIT.deleteSetting(SettingsServiceBean.Key.IdentifierGenerationStyle);
@@ -120,6 +150,13 @@ public static void afterClass() {
         Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport);
         removeExcludeEmail.then().assertThat()
                 .statusCode(200);
+
+        Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
+        removeAnonymizedFieldTypeNames.then().assertThat()
+                .statusCode(200);
+
+        UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+
         /* See above
         Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl);
         removeDcmUrl.then().assertThat()
@@ -130,6 +167,59 @@ public static void afterClass() {
                 .statusCode(200);
          */
     }
+    
+    @Test
+    public void testCollectionSchema(){
+        
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+        
+        Response getCollectionSchemaResponse =  UtilIT.getCollectionSchema(dataverseAlias, apiToken);
+        getCollectionSchemaResponse.prettyPrint();
+        getCollectionSchemaResponse.then().assertThat()
+                .statusCode(200);
+
+        JsonObject expectedSchema = null;
+        try {
+            expectedSchema = JsonUtil.getJsonObjectFromFile("doc/sphinx-guides/source/_static/api/dataset-schema.json");
+        } catch (IOException ex) {
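+            // If the expected schema file can't be read, expectedSchema stays null and the comparison below can't pass.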
+        }
+
+        assertEquals(JsonUtil.prettyPrint(expectedSchema), JsonUtil.prettyPrint(getCollectionSchemaResponse.body().asString()));
+        
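+        // Validate a known-good dataset JSON file against the collection schema.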
+        String expectedJson = UtilIT.getDatasetJson("scripts/search/tests/data/dataset-finch1.json");
+        
+        Response validateDatasetJsonResponse = UtilIT.validateDatasetJson(dataverseAlias, expectedJson, apiToken);
+        validateDatasetJsonResponse.prettyPrint();
+        validateDatasetJsonResponse.then().assertThat()
+                .statusCode(200);
+        
+        
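+        // A dataset JSON that is missing required fields should be reported as failing validation.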
+        String pathToJsonFile = "scripts/search/tests/data/datasetMissingReqFields.json"; 
+        
+        String jsonIn = UtilIT.getDatasetJson(pathToJsonFile);
+        
+        Response validateBadDatasetJsonResponse = UtilIT.validateDatasetJson(dataverseAlias, jsonIn, apiToken);
+        validateBadDatasetJsonResponse.prettyPrint();
+        validateBadDatasetJsonResponse.then().assertThat()
+                .statusCode(200);
+
+        
+        validateBadDatasetJsonResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body(containsString("failed validation"));
+        
+        Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, apiToken);
+        deleteDataverseResponse.prettyPrint();
+        assertEquals(200, deleteDataverseResponse.getStatusCode());
+        
+    }
 
     @Test
     public void testCreateDataset() {
@@ -473,7 +563,7 @@ public void testCreatePublishDestroyDataset() {
         assertTrue(datasetContactFromExport.toString().contains("finch@mailinator.com"));
         assertTrue(firstValue.toString().contains("finch@mailinator.com"));
 
-        Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, ":latest-published", apiToken);
+        Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken);
         getDatasetVersion.prettyPrint();
         getDatasetVersion.then().assertThat()
                 .body("data.datasetId", equalTo(datasetId))
@@ -517,6 +607,18 @@ public void testCreatePublishDestroyDataset() {
         }
         assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo"));
 
+        // Test includeDeaccessioned option
+        Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // includeDeaccessioned false
+        getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken, false, false);
+        getDatasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // includeDeaccessioned true
+        getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken, false, true);
+        getDatasetVersion.then().assertThat().statusCode(OK.getStatusCode());
+
         Response deleteDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken);
         deleteDatasetResponse.prettyPrint();
         assertEquals(200, deleteDatasetResponse.getStatusCode());
@@ -531,6 +633,103 @@ public void testCreatePublishDestroyDataset() {
 
     }
 
+    /**
+     * The APIs (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid})
+     * are already called from other RestAssured tests, in this class and also in FilesIT.
+     * But this test is dedicated to these APIs specifically, and focuses on the
+     * functionality added to them in 6.1.
+     */
+    @Test
+    public void testDatasetVersionsAPI() {
+        // Create user
+        String apiToken = UtilIT.createRandomUserGetToken();
+
+        // Create user with no permission
+        String apiTokenNoPerms = UtilIT.createRandomUserGetToken();
+
+        // Create Collection
+        String collectionAlias = UtilIT.createRandomCollectionGetAlias(apiToken);
+
+        // Create Dataset
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(collectionAlias, apiToken);
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId");
+
+        // Upload file
+        String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+        
+        // Check that the file we just uploaded is shown by the versions API:
+        Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
+        unpublishedDraft.prettyPrint();
+        unpublishedDraft.then().assertThat()
+                .body("data.files.size()", equalTo(1))
+                .statusCode(OK.getStatusCode());
+        
+        // Now check that the file is NOT shown when we ask the versions API to
+        // skip files:
+        boolean skipFiles = true; 
+        unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken, skipFiles, false);
+        unpublishedDraft.prettyPrint();
+        unpublishedDraft.then().assertThat()
+                .body("data.files", equalTo(null))
+                .statusCode(OK.getStatusCode());
+
+        // Publish collection and dataset
+        UtilIT.publishDataverseViaNativeApi(collectionAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+        UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+        // Upload another file: 
+        String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
+        Response uploadResponse2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, apiToken);
+        uploadResponse2.then().assertThat().statusCode(OK.getStatusCode());
+       
+        // We should now have a published version, and a draft. 
+        
+        // Call the /versions API *with the owner's API token* and make sure both
+        // versions are listed; also check that the correct number of files
+        // is shown in each version (2 in the draft, 1 in the published).
+        Response versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken);
+        versionsResponse.prettyPrint();
+        versionsResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.size()", equalTo(2))
+                .body("data[0].files.size()", equalTo(2))
+                .body("data[1].files.size()", equalTo(1));
+        
+        // Now call this API with the new (as of 6.1) pagination parameters
+        Integer offset = 0;
+        Integer howmany = 1;
+        versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken, offset, howmany);
+        // (the above should return only one version, the draft)
+        versionsResponse.prettyPrint();
+        versionsResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.size()", equalTo(1))
+                .body("data[0].files.size()", equalTo(2));
+                
+        // And now call it with an unprivileged token, to make sure only one
+        // (the published) version is shown:
+        
+        versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms);
+        versionsResponse.prettyPrint();
+        versionsResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.size()", equalTo(1));
+        
+        // And now call the "short", no-files version of the same API
+        versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms, skipFiles);
+        versionsResponse.prettyPrint();
+        versionsResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].files", equalTo(null));
+    }
+
+    
     /**
      * This test requires the root dataverse to be published to pass.
      */
@@ -643,9 +842,7 @@ public void testExport() {
         exportDatasetAsDdi.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
-        // This is now returning [] instead of sammi@sample.com. Not sure why.
-        // :ExcludeEmailFromExport is absent so the email should be shown.
-        assertEquals("[]", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.stdyInfo.contact.@email"));
+        assertEquals(null, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.stdyInfo.contact.@email"));
         assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo"));
 
         Response reexportAllFormats = UtilIT.reexportDatasetAllFormats(datasetPersistentId);
@@ -735,7 +932,7 @@ public void testExcludeEmail() {
 
         assertEquals("Dataverse, Admin", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact"));
         // no "sammi@sample.com" to be found https://github.com/IQSS/dataverse/issues/3443
-        assertEquals("[]", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact.@email"));
+        assertEquals(null, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact.@email"));
         assertEquals("Sample Datasets, inc.", XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.stdyDscr.citation.distStmt.contact.@affiliation"));
         assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo"));
 
@@ -859,7 +1056,7 @@ public void testPrivateUrl() {
         String username = UtilIT.getUsernameFromResponse(createUser);
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
 
-        Response failToCreateWhenDatasetIdNotFound = UtilIT.privateUrlCreate(Integer.MAX_VALUE, apiToken);
+        Response failToCreateWhenDatasetIdNotFound = UtilIT.privateUrlCreate(Integer.MAX_VALUE, apiToken, false);
         failToCreateWhenDatasetIdNotFound.prettyPrint();
         assertEquals(NOT_FOUND.getStatusCode(), failToCreateWhenDatasetIdNotFound.getStatusCode());
 
@@ -889,7 +1086,7 @@ public void testPrivateUrl() {
         grantRole.prettyPrint();
         assertEquals(OK.getStatusCode(), grantRole.getStatusCode());
         UtilIT.getRoleAssignmentsOnDataverse(dataverseAlias, apiToken).prettyPrint();
-        Response contributorDoesNotHavePermissionToCreatePrivateUrl = UtilIT.privateUrlCreate(datasetId, contributorApiToken);
+        Response contributorDoesNotHavePermissionToCreatePrivateUrl = UtilIT.privateUrlCreate(datasetId, contributorApiToken, false);
         contributorDoesNotHavePermissionToCreatePrivateUrl.prettyPrint();
         assertEquals(UNAUTHORIZED.getStatusCode(), contributorDoesNotHavePermissionToCreatePrivateUrl.getStatusCode());
 
@@ -917,7 +1114,7 @@ public void testPrivateUrl() {
         pristine.prettyPrint();
         assertEquals(NOT_FOUND.getStatusCode(), pristine.getStatusCode());
 
-        Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         createPrivateUrl.prettyPrint();
         assertEquals(OK.getStatusCode(), createPrivateUrl.getStatusCode());
 
@@ -1052,7 +1249,7 @@ public void testPrivateUrl() {
         Response downloadFile = UtilIT.downloadFile(fileId, tokenForPrivateUrlUser);
         assertEquals(OK.getStatusCode(), downloadFile.getStatusCode());
         Response downloadFileBadToken = UtilIT.downloadFile(fileId, "junk");
-        assertEquals(FORBIDDEN.getStatusCode(), downloadFileBadToken.getStatusCode());
+        assertEquals(UNAUTHORIZED.getStatusCode(), downloadFileBadToken.getStatusCode());
         Response notPermittedToListRoleAssignment = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, userWithNoRolesApiToken);
         assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignment.getStatusCode());
         Response roleAssignments = UtilIT.getRoleAssignmentsOnDataset(datasetId.toString(), null, apiToken);
@@ -1077,11 +1274,11 @@ public void testPrivateUrl() {
         shouldNoLongerExist.prettyPrint();
         assertEquals(NOT_FOUND.getStatusCode(), shouldNoLongerExist.getStatusCode());
 
-        Response createPrivateUrlUnauth = UtilIT.privateUrlCreate(datasetId, userWithNoRolesApiToken);
+        Response createPrivateUrlUnauth = UtilIT.privateUrlCreate(datasetId, userWithNoRolesApiToken, false);
         createPrivateUrlUnauth.prettyPrint();
         assertEquals(UNAUTHORIZED.getStatusCode(), createPrivateUrlUnauth.getStatusCode());
 
-        Response createPrivateUrlAgain = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response createPrivateUrlAgain = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         createPrivateUrlAgain.prettyPrint();
         assertEquals(OK.getStatusCode(), createPrivateUrlAgain.getStatusCode());
 
@@ -1097,11 +1294,11 @@ public void testPrivateUrl() {
         tryToDeleteAlreadyDeletedPrivateUrl.prettyPrint();
         assertEquals(NOT_FOUND.getStatusCode(), tryToDeleteAlreadyDeletedPrivateUrl.getStatusCode());
 
-        Response createPrivateUrlOnceAgain = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response createPrivateUrlOnceAgain = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         createPrivateUrlOnceAgain.prettyPrint();
         assertEquals(OK.getStatusCode(), createPrivateUrlOnceAgain.getStatusCode());
 
-        Response tryToCreatePrivateUrlWhenExisting = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response tryToCreatePrivateUrlWhenExisting = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         tryToCreatePrivateUrlWhenExisting.prettyPrint();
         assertEquals(FORBIDDEN.getStatusCode(), tryToCreatePrivateUrlWhenExisting.getStatusCode());
 
@@ -1120,7 +1317,7 @@ public void testPrivateUrl() {
         List<JsonObject> noAssignmentsForPrivateUrlUser = with(publishingShouldHaveRemovedRoleAssignmentForPrivateUrlUser.body().asString()).param("member", "member").getJsonObject("data.findAll { data -> data._roleAlias == member }");
         assertEquals(0, noAssignmentsForPrivateUrlUser.size());
 
-        Response tryToCreatePrivateUrlToPublishedVersion = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response tryToCreatePrivateUrlToPublishedVersion = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         tryToCreatePrivateUrlToPublishedVersion.prettyPrint();
         assertEquals(FORBIDDEN.getStatusCode(), tryToCreatePrivateUrlToPublishedVersion.getStatusCode());
 
@@ -1129,12 +1326,12 @@ public void testPrivateUrl() {
         updatedMetadataResponse.prettyPrint();
         assertEquals(OK.getStatusCode(), updatedMetadataResponse.getStatusCode());
 
-        Response createPrivateUrlForPostVersionOneDraft = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response createPrivateUrlForPostVersionOneDraft = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         createPrivateUrlForPostVersionOneDraft.prettyPrint();
         assertEquals(OK.getStatusCode(), createPrivateUrlForPostVersionOneDraft.getStatusCode());
 
         // A Contributor has DeleteDatasetDraft
-        Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, ":draft", contributorApiToken);
+        Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, DS_VERSION_DRAFT, contributorApiToken);
         deleteDraftVersionAsContributor.prettyPrint();
         deleteDraftVersionAsContributor.then().assertThat()
                 .statusCode(OK.getStatusCode())
@@ -1156,7 +1353,7 @@ public void testPrivateUrl() {
          * a dataset is destroy. Still, we'll keep this test in here in case we
          * switch Private URL back to being its own table in the future.
          */
-        Response createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset.prettyPrint();
         assertEquals(OK.getStatusCode(), createPrivateUrlToMakeSureItIsDeletedWithDestructionOfDataset.getStatusCode());
 
@@ -1475,7 +1672,7 @@ public void testCreateDatasetWithDcmDependency() {
         getRsyncScriptPermErrorGuest.then().assertThat()
                 .statusCode(UNAUTHORIZED.getStatusCode())
                 .contentType(ContentType.JSON)
-                .body("message", equalTo("Please provide a key query parameter (?key=XXX) or via the HTTP header X-Dataverse-key"));
+                .body("message", equalTo(AbstractApiBean.RESPONSE_MESSAGE_AUTHENTICATED_USER_REQUIRED));
 
         Response createNoPermsUser = UtilIT.createRandomUser();
         String noPermsUsername = UtilIT.getUsernameFromResponse(createNoPermsUser);
@@ -1828,7 +2025,7 @@ public void testCreateDeleteDatasetLink() {
     }
     
     @Test
-    @Ignore
+    @Disabled
     public void testApiErrors() {
 
         /*
@@ -1946,7 +2143,7 @@ public void testDatasetLocksApi() {
                 break;
             } 
         }
-        assertTrue("Lock missing from the output of /api/datasets/locks", lockListedCorrectly);        
+        assertTrue(lockListedCorrectly, "Lock missing from the output of /api/datasets/locks");
         
         // Try the same, but with an api token of a random, non-super user 
         // (this should get rejected):
@@ -1976,7 +2173,7 @@ public void testDatasetLocksApi() {
                 break;
             } 
         }
-        assertTrue("Lock missing from the output of /api/datasets/locks?type=Ingest", lockListedCorrectly);        
+        assertTrue(lockListedCorrectly, "Lock missing from the output of /api/datasets/locks?type=Ingest");
 
         
         // Try to list locks of an invalid type:
@@ -2037,7 +2234,7 @@ public void testDatasetLocksApi() {
      * This test requires the root dataverse to be published to pass.
      */
     @Test
-    @Ignore
+    @Disabled
     public void testUpdatePIDMetadataAPI() {
 
         Response createUser = UtilIT.createRandomUser();
@@ -2249,6 +2446,71 @@ public void testLinkingDatasets() {
          */
     }
 
+    /**
+     * This tests the "DDI export" and verifies that variable metadata is included for an unrestricted file.
+     */
+    @Test
+    public void testUnrestrictedFileExportDdi() throws IOException {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String authorUsername = UtilIT.getUsernameFromResponse(createUser);
+        String authorApiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverse = UtilIT.createRandomDataverse(authorApiToken);
+        createDataverse.prettyPrint();
+        createDataverse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, authorApiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId");
+
+        Path pathToFile = Paths.get(java.nio.file.Files.createTempDirectory(null) + File.separator + "data.csv");
+        String contentOfCsv = ""
+                + "name,pounds,species\n"
+                + "Marshall,40,dog\n"
+                + "Tiger,17,cat\n"
+                + "Panther,21,cat\n";
+        java.nio.file.Files.write(pathToFile, contentOfCsv.getBytes());
+
+        Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile.toString(), authorApiToken);
+        uploadFile.prettyPrint();
+        uploadFile.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.files[0].label", equalTo("data.csv"));
+
+        String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
+
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
+
+        Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, authorApiToken);
+        publishDataverse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", authorApiToken);
+        publishDataset.then().assertThat().statusCode(OK.getStatusCode());
+
+        // We're testing export here, which is at dataset level.
+        // Guest/public version
+        Response exportByGuest = UtilIT.exportDataset(datasetPid, "ddi");
+        exportByGuest.prettyPrint();
+        exportByGuest.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("codeBook.fileDscr.fileTxt.fileName", equalTo("data.tab"))
+                .body("codeBook.fileDscr.fileTxt.dimensns.caseQnty", equalTo("3"))
+                .body("codeBook.fileDscr.fileTxt.dimensns.varQnty", equalTo("3"))
+                .body("codeBook.dataDscr", CoreMatchers.not(equalTo(null)))
+                .body("codeBook.dataDscr.var[0].@name", equalTo("name"))
+                .body("codeBook.dataDscr.var[1].@name", equalTo("pounds"))
+                // This is an example of a summary stat (max) that should be visible.
+                .body("codeBook.dataDscr.var[1].sumStat.find { it.@type == 'max' }", equalTo("40.0"))
+                .body("codeBook.dataDscr.var[2].@name", equalTo("species"));
+    }
+        
     /**
      * In this test we are restricting a file and testing "export DDI" at the
      * dataset level as well as getting the DDI at the file level.
@@ -2297,7 +2559,7 @@ public void testRestrictFileExportDdi() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken);
         restrictFile.prettyPrint();
@@ -2318,7 +2580,7 @@ public void testRestrictFileExportDdi() throws IOException {
 
         // Here we are asserting that dataDscr is empty. TODO: Do this in REST Assured.
         String dataDscrForGuest = XmlPath.from(exportByGuest.asString()).getString("codeBook.dataDscr");
-        Assert.assertEquals("", dataDscrForGuest);
+        assertEquals("", dataDscrForGuest);
 
         // Author export (has access)
         Response exportByAuthor = UtilIT.exportDataset(datasetPid, "ddi", authorApiToken);
@@ -2329,7 +2591,7 @@ public void testRestrictFileExportDdi() throws IOException {
 
         // Here we are asserting that dataDscr is empty. TODO: Do this in REST Assured.
         String dataDscrForAuthor = XmlPath.from(exportByAuthor.asString()).getString("codeBook.dataDscr");
-        Assert.assertEquals("", dataDscrForAuthor);
+        assertEquals("", dataDscrForAuthor);
 
         // Now we are testing file-level retrieval.
         // The author has access to a restricted file and gets all the metadata.
@@ -2380,7 +2642,8 @@ public void testSemanticMetadataAPIs() {
         String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
 
         // Create a dataset using native api
-        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        // (This test requires that we create a dataset without the license set; note the extra boolean arg.)
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken, true);
         createDatasetResponse.prettyPrint();
         Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse);
 
@@ -2714,7 +2977,7 @@ public void testCuratePublishedDatasetVersionCommand() throws IOException {
 
         // Give file time to ingest
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
         
         Response origXml = UtilIT.getFileMetadata(origFileId, null, apiToken);
         assertEquals(200, origXml.getStatusCode());
@@ -2854,7 +3117,7 @@ public void testRestrictFileTermsOfUseAndAccess() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken);
         restrictFile.prettyPrint();
@@ -2924,7 +3187,7 @@ public void testRestrictFilesWORequestAccess() throws IOException {
 
         String fileId = JsonPath.from(uploadFile.body().asString()).getString("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response restrictFile = UtilIT.restrictFile(fileId, true, authorApiToken);
         restrictFile.prettyPrint();
@@ -3050,4 +3313,919 @@ public void testArchivalStatusAPI() throws IOException {
 
     }
 
+    @Test
+    public void testGetDatasetSummaryFieldNames() {
+        Response summaryFieldNamesResponse = UtilIT.getDatasetSummaryFieldNames();
+        summaryFieldNamesResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                // check for any order
+                .body("data", hasItems("dsDescription", "subject", "keyword", "publication", "notesText"))
+                // check for exact order
+                .body("data", contains("dsDescription", "subject", "keyword", "publication", "notesText"));
+    }
+
+    @Test
+    public void getPrivateUrlDatasetVersion() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        // Non-anonymized test
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        UtilIT.privateUrlCreate(datasetId, apiToken, false).then().assertThat().statusCode(OK.getStatusCode());
+        Response privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken);
+        privateUrlGet.then().assertThat().statusCode(OK.getStatusCode());
+        String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token");
+
+        // We verify that the response contains the dataset associated to the private URL token
+        Response getPrivateUrlDatasetVersionResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser);
+        getPrivateUrlDatasetVersionResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.datasetId", equalTo(datasetId));
+
+        // Test anonymized
+        Response setAnonymizedFieldsSettingResponse = UtilIT.setSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames, "author");
+        setAnonymizedFieldsSettingResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        UtilIT.privateUrlCreate(datasetId, apiToken, true).then().assertThat().statusCode(OK.getStatusCode());
+        privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken);
+        privateUrlGet.then().assertThat().statusCode(OK.getStatusCode());
+        tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token");
+
+        Response getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion(tokenForPrivateUrlUser);
+        getPrivateUrlDatasetVersionAnonymizedResponse.prettyPrint();
+
+        // We verify that the response is anonymized for the author field
+        getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.datasetId", equalTo(datasetId))
+                .body("data.metadataBlocks.citation.fields[1].value", equalTo(BundleUtil.getStringFromBundle("dataset.anonymized.withheld")))
+                .body("data.metadataBlocks.citation.fields[1].typeClass", equalTo("primitive"))
+                .body("data.metadataBlocks.citation.fields[1].multiple", equalTo(false));
+
+        // Similar to the check above but doesn't rely on fields[1]
+        List<JsonObject> authors = with(getPrivateUrlDatasetVersionAnonymizedResponse.body().asString()).param("fieldToFind", "author")
+                .getJsonObject("data.metadataBlocks.citation.fields.findAll { fields -> fields.typeName == fieldToFind }");
+        Map firstAuthor = authors.get(0);
+        String value = (String) firstAuthor.get("value");
+        assertEquals(BundleUtil.getStringFromBundle("dataset.anonymized.withheld"), value);
+
+        UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
+
+        // Test invalid token
+        getPrivateUrlDatasetVersionAnonymizedResponse = UtilIT.getPrivateUrlDatasetVersion("invalidToken");
+        getPrivateUrlDatasetVersionAnonymizedResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+    }
+
+    @Test
+    public void getPrivateUrlDatasetVersionCitation() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        UtilIT.privateUrlCreate(datasetId, apiToken, false).then().assertThat().statusCode(OK.getStatusCode());
+        Response privateUrlGet = UtilIT.privateUrlGet(datasetId, apiToken);
+        String tokenForPrivateUrlUser = JsonPath.from(privateUrlGet.body().asString()).getString("data.token");
+
+        Response getPrivateUrlDatasetVersionCitation = UtilIT.getPrivateUrlDatasetVersionCitation(tokenForPrivateUrlUser);
+        getPrivateUrlDatasetVersionCitation.prettyPrint();
+
+        getPrivateUrlDatasetVersionCitation.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                // We check that the returned message contains information expected for the citation string
+                .body("data.message", containsString("DRAFT VERSION"));
+    }
+
+    @Test
+    public void getDatasetVersionCitation() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, false, apiToken);
+        getDatasetVersionCitationResponse.prettyPrint();
+
+        getDatasetVersionCitationResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                // We check that the returned message contains information expected for the citation string
+                .body("data.message", containsString("DRAFT VERSION"));
+
+        // Test Deaccessioned
+        Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // includeDeaccessioned false
+        Response getDatasetVersionCitationNotDeaccessioned = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_LATEST_PUBLISHED, false, apiToken);
+        getDatasetVersionCitationNotDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // includeDeaccessioned true
+        Response getDatasetVersionCitationDeaccessioned =  UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_LATEST_PUBLISHED, true, apiToken);
+        getDatasetVersionCitationDeaccessioned.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.message", containsString("DEACCESSIONED VERSION"));
+    }
+
+    @Test
+    public void getVersionFiles() throws IOException, InterruptedException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        String testFileName1 = "test_1.txt";
+        String testFileName2 = "test_2.txt";
+        String testFileName3 = "test_3.txt";
+        String testFileName4 = "test_4.txt";
+        String testFileName5 = "test_5.png";
+
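+        // (The files are given deliberately different sizes so the Size order criteria check below has a
+        // well-defined expected order.)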
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName5, new byte[300], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName4, new byte[400], apiToken);
+
+        String testDatasetVersion = DS_VERSION_LATEST;
+
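+        // (Argument order for UtilIT.getVersionFiles in the calls below: datasetId, version, limit, offset,
+        // contentType, accessStatus, categoryName, tabularTagName, searchText, orderCriteria,
+        // includeDeaccessioned, apiToken.)
+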
+        // Test pagination and NameAZ order criteria (the default criteria)
+        int testPageSize = 2;
+
+        // Test page 1
+        Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, null, null, null, null, null, null, null, false, apiToken);
+
+        getVersionFilesResponsePaginated.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1))
+                .body("data[1].label", equalTo(testFileName2));
+
+        int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size();
+        assertEquals(testPageSize, fileMetadatasCount);
+
+        String testFileId1 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[0].dataFile.id");
+        String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id");
+
+        // Test page 2
+        getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize, null, null, null, null, null, null, false, apiToken);
+
+        getVersionFilesResponsePaginated.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName3))
+                .body("data[1].label", equalTo(testFileName4));
+
+        fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size();
+        assertEquals(testPageSize, fileMetadatasCount);
+
+        // Test page 3 (last)
+        getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize * 2, null, null, null, null, null, null, false, apiToken);
+
+        getVersionFilesResponsePaginated.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName5));
+
+        fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        // Test NameZA order criteria
+        Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.NameZA.toString(), false, apiToken);
+
+        getVersionFilesResponseNameZACriteria.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName5))
+                .body("data[1].label", equalTo(testFileName4))
+                .body("data[2].label", equalTo(testFileName3))
+                .body("data[3].label", equalTo(testFileName2))
+                .body("data[4].label", equalTo(testFileName1));
+
+        // Test Newest order criteria
+        Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Newest.toString(), false, apiToken);
+
+        getVersionFilesResponseNewestCriteria.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName4))
+                .body("data[1].label", equalTo(testFileName5))
+                .body("data[2].label", equalTo(testFileName3))
+                .body("data[3].label", equalTo(testFileName2))
+                .body("data[4].label", equalTo(testFileName1));
+
+        // Test Oldest order criteria
+        Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Oldest.toString(), false, apiToken);
+
+        getVersionFilesResponseOldestCriteria.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1))
+                .body("data[1].label", equalTo(testFileName2))
+                .body("data[2].label", equalTo(testFileName3))
+                .body("data[3].label", equalTo(testFileName5))
+                .body("data[4].label", equalTo(testFileName4));
+
+        // Test Size order criteria
+        Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Size.toString(), false, apiToken);
+
+        getVersionFilesResponseSizeCriteria.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1))
+                .body("data[1].label", equalTo(testFileName3))
+                .body("data[2].label", equalTo(testFileName2))
+                .body("data[3].label", equalTo(testFileName5))
+                .body("data[4].label", equalTo(testFileName4));
+
+        // Test Type order criteria
+        Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Type.toString(), false, apiToken);
+
+        getVersionFilesResponseTypeCriteria.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName5))
+                .body("data[1].label", equalTo(testFileName1))
+                .body("data[2].label", equalTo(testFileName2))
+                .body("data[3].label", equalTo(testFileName3))
+                .body("data[4].label", equalTo(testFileName4));
+
+        // Test invalid order criteria
+        String invalidOrderCriteria = "invalidOrderCriteria";
+        Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken);
+        getVersionFilesResponseInvalidOrderCriteria.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria));
+
+        // Test Content Type
+        Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, "image/png", null, null, null, null, null, false, apiToken);
+
+        getVersionFilesResponseContentType.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName5));
+
+        fileMetadatasCount = getVersionFilesResponseContentType.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        // Test Category Name
+        String testCategory = "testCategory";
+        Response setFileCategoriesResponse = UtilIT.setFileCategories(testFileId1, apiToken, List.of(testCategory));
+        setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode());
+        setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory));
+        setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, testCategory, null, null, null, false, apiToken);
+
+        getVersionFilesResponseCategoryName.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1))
+                .body("data[1].label", equalTo(testFileName2));
+
+        fileMetadatasCount = getVersionFilesResponseCategoryName.jsonPath().getList("data").size();
+        assertEquals(2, fileMetadatasCount);
+
+        // Test Access Status Restricted
+        Response restrictFileResponse = UtilIT.restrictFile(String.valueOf(testFileId1), true, apiToken);
+        restrictFileResponse.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.Restricted.toString(), null, null, null, null, false, apiToken);
+
+        getVersionFilesResponseRestricted.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1));
+
+        fileMetadatasCount = getVersionFilesResponseRestricted.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        // Test Access Status Embargoed
+        UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12");
+        String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
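+        // (6 months is within the 12-month maximum configured above, so the embargo requests below should succeed.)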
+
+        // Create embargo for test file 1 (Embargoed and Restricted)
+        Response createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId1), activeEmbargoDate, apiToken);
+
+        createActiveFileEmbargoResponse.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        // Create embargo for test file 2 (Embargoed and Public)
+        createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId2), activeEmbargoDate, apiToken);
+
+        createActiveFileEmbargoResponse.then().assertThat()
+                .statusCode(OK.getStatusCode());
+
+        Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, false, apiToken);
+
+        getVersionFilesResponseEmbargoedThenPublic.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName2));
+
+        fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, false, apiToken);
+
+        getVersionFilesResponseEmbargoedThenRestricted.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1));
+
+        fileMetadatasCount = getVersionFilesResponseEmbargoedThenRestricted.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        // Test Access Status Public
+        Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, null, false, apiToken);
+
+        getVersionFilesResponsePublic.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName3))
+                .body("data[1].label", equalTo(testFileName4))
+                .body("data[2].label", equalTo(testFileName5));
+
+        fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size();
+        assertEquals(3, fileMetadatasCount);
+
+        // Test invalid access status
+        String invalidStatus = "invalidStatus";
+        Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, invalidStatus, null, null, null, null, false, apiToken);
+        getVersionFilesResponseInvalidStatus.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus))));
+
+        // Test Search Text
+        Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, "test_1", null, false, apiToken);
+
+        getVersionFilesResponseSearchText.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1));
+
+        fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        // Test Deaccessioned
+        Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // includeDeaccessioned false
+        Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, false, apiToken);
+        getVersionFilesResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // includeDeaccessioned true
+        Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, apiToken);
+
+        getVersionFilesResponseDeaccessioned.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo(testFileName1))
+                .body("data[1].label", equalTo(testFileName2))
+                .body("data[2].label", equalTo(testFileName3))
+                .body("data[3].label", equalTo(testFileName4))
+                .body("data[4].label", equalTo(testFileName5));
+
+        // Test Tabular Tag Name
+        String pathToTabularTestFile = "src/test/resources/tab/test.tab";
+        Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+
+        // Give the tabular file time to finish ingesting
+        sleep(2000);
+
+        String tabularTagName = "Survey";
+        Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName));
+        setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response getVersionFilesResponseTabularTagName = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, null, null, null, tabularTagName, null, null, false, apiToken);
+
+        getVersionFilesResponseTabularTagName.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data[0].label", equalTo("test.tab"));
+
+        fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size();
+        assertEquals(1, fileMetadatasCount);
+
+        // Test that the dataset files for a deaccessioned dataset cannot be accessed by a guest
+        // By latest published version
+        Response getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, null);
+        getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+        // By specific version 1.0
+        getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, "1.0", null, null, null, null, null, null, null, null, true, null);
+        getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+    }
+
+    @Test
+    public void getVersionFileCounts() throws IOException, InterruptedException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Creating test files
+        String testFileName1 = "test_1.txt";
+        String testFileName2 = "test_2.txt";
+        String testFileName3 = "test_3.png";
+
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken);
+
+        // Creating a categorized test file
+        String pathToTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+        String testCategory = "testCategory";
+        Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, List.of(testCategory));
+        setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Setting embargo for file (Embargoed and Public)
+        UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12");
+        String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd"));
+        Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken);
+        createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Getting the file counts and asserting each count
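+        // (Argument order for UtilIT.getVersionFileCounts in the calls below: datasetId, version, contentType,
+        // accessStatus, categoryName, tabularTagName, searchText, includeDeaccessioned, apiToken.)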
+        Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, null, null, null, false, apiToken);
+
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        LinkedHashMap<String, Integer> responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        LinkedHashMap<String, Integer> responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        LinkedHashMap<String, Integer> responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
+        LinkedHashMap<String, Integer> responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(4, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(2, responseCountPerContentTypeMap.get("image/png"));
+        assertEquals(2, responseCountPerContentTypeMap.get("text/plain"));
+        assertEquals(2, responseCountPerContentTypeMap.size());
+        assertEquals(1, responseCountPerCategoryNameMap.get(testCategory));
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
+        assertEquals(2, responseCountPerAccessStatusMap.size());
+        assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
+        assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString()));
+
+        // Test content type criteria
+        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, "image/png", null, null, null, null, false, apiToken);
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
+        responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(2, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(2, responseCountPerContentTypeMap.get("image/png"));
+        assertEquals(1, responseCountPerContentTypeMap.size());
+        assertEquals(1, responseCountPerCategoryNameMap.size());
+        assertEquals(1, responseCountPerCategoryNameMap.get(testCategory));
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
+        assertEquals(2, responseCountPerAccessStatusMap.size());
+        assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
+        assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString()));
+
+        // Test access status criteria
+        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, false, apiToken);
+
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
+        responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(3, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(1, responseCountPerContentTypeMap.get("image/png"));
+        assertEquals(2, responseCountPerContentTypeMap.get("text/plain"));
+        assertEquals(2, responseCountPerContentTypeMap.size());
+        assertEquals(0, responseCountPerCategoryNameMap.size());
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
+        assertEquals(1, responseCountPerAccessStatusMap.size());
+        assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
+
+        // Test invalid access status
+        String invalidStatus = "invalidStatus";
+        Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, invalidStatus, null, null, null, false, apiToken);
+        getVersionFilesResponseInvalidStatus.then().assertThat()
+                .statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus))));
+
+        // Test category name criteria
+        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, testCategory, null, null, false, apiToken);
+
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
+        responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(1, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(1, responseCountPerContentTypeMap.get("image/png"));
+        assertEquals(1, responseCountPerContentTypeMap.size());
+        assertEquals(1, responseCountPerCategoryNameMap.size());
+        assertEquals(1, responseCountPerCategoryNameMap.get(testCategory));
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
+        assertEquals(1, responseCountPerAccessStatusMap.size());
+        assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString()));
+
+        // Test search text criteria
+        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, null, null, "test", false, apiToken);
+
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
+        responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(3, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(1, responseCountPerContentTypeMap.get("image/png"));
+        assertEquals(2, responseCountPerContentTypeMap.get("text/plain"));
+        assertEquals(2, responseCountPerContentTypeMap.size());
+        assertEquals(0, responseCountPerCategoryNameMap.size());
+        assertEquals(0, responseCountPerTabularTagNameMap.size());
+        assertEquals(1, responseCountPerAccessStatusMap.size());
+        assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
+
+        // Test tabular tag name criteria
+        String pathToTabularTestFile = "src/test/resources/tab/test.tab";
+        Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+
+        // Ensure tabular file is ingested
+        sleep(2000);
+
+        String tabularTagName = "Survey";
+        Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName));
+        setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, null, tabularTagName, null, false, apiToken);
+
+        getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        responseJsonPath = getVersionFileCountsResponse.jsonPath();
+        responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType");
+        responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName");
+        responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName");
+        responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus");
+
+        assertEquals(1, (Integer) responseJsonPath.get("data.total"));
+        assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values"));
+        assertEquals(1, responseCountPerContentTypeMap.size());
+        assertEquals(0, responseCountPerCategoryNameMap.size());
+        assertEquals(1, responseCountPerTabularTagNameMap.size());
+        assertEquals(1, responseCountPerTabularTagNameMap.get(tabularTagName));
+        assertEquals(1, responseCountPerAccessStatusMap.size());
+        assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString()));
+
+        // Test Deaccessioned
+        Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // includeDeaccessioned false
+        Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, false, apiToken);
+        getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // includeDeaccessioned true
+        Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, true, apiToken);
+        getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode());
+
+        responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath();
+        assertEquals(5, (Integer) responseJsonPath.get("data.total"));
+
+        // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest
+        // By latest published version
+        Response getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, true, null);
+        getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+        // By specific version 1.0
+        getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, "1.0", null, null, null, null, null, true, null);
+        getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+    }
+
+    @Test
+    public void deaccessionDataset() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        String testDeaccessionReason = "Test deaccession reason.";
+        String testDeaccessionForwardURL = "http://demo.dataverse.org";
+
+        // Test that draft and latest version constants are not allowed and a bad request error is received
+        String expectedInvalidVersionIdentifierError = BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED));
+
+        Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_DRAFT, testDeaccessionReason, testDeaccessionForwardURL, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo(expectedInvalidVersionIdentifierError));
+
+        deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST, testDeaccessionReason, testDeaccessionForwardURL, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo(expectedInvalidVersionIdentifierError));
+
+        // Test that a not found error occurs when there is no published version available
+        deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // Publish test dataset
+        Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Test that a bad request error is received when the forward URL exceeds ARCHIVE_NOTE_MAX_LENGTH
+        String testInvalidDeaccessionForwardURL = RandomStringUtils.randomAlphabetic(ARCHIVE_NOTE_MAX_LENGTH + 1);
+
+        deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testInvalidDeaccessionForwardURL, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", containsString(testInvalidDeaccessionForwardURL));
+
+        // Test that the dataset is successfully deaccessioned when published and valid deaccession params are sent
+        deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Test that a not found error occurs when the only published version has already been deaccessioned
+        deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // Test that a dataset can be deaccessioned without forward URL
+        createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, null, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+    }
+
+    @Test
+    public void getDownloadSize() throws IOException, InterruptedException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Creating test text files
+        String testFileName1 = "test_1.txt";
+        String testFileName2 = "test_2.txt";
+
+        int testFileSize1 = 50;
+        int testFileSize2 = 200;
+
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[testFileSize1], apiToken);
+        UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[testFileSize2], apiToken);
+
+        int expectedTextFilesStorageSize = testFileSize1 + testFileSize2;
+
+        // Get the total size when there are no tabular files
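+        // getDownloadSize parameter order (as used here): datasetId, version, contentType, accessStatus,
+        // categoryName, tabularTagName, searchText, downloadSizeMode, includeDeaccessioned, apiToken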
+        Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.storageSize", equalTo(expectedTextFilesStorageSize));
+
+        // Upload test tabular file
+        String pathToTabularTestFile = "src/test/resources/tab/test.tab";
+        Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
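+        // Expected size, in bytes, of the original (pre-ingest) test.tab upload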
+        int tabularOriginalSize = 157;
+
+        // Ensure tabular file is ingested
+        Thread.sleep(2000);
+
+        // Get the total size ignoring the original tabular file sizes
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        int actualSizeIgnoringOriginalTabularSizes = Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize"));
+
+        // Assert that the size has increased due to the archival version of the last uploaded (tabular) file
+        assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize);
+
+        // Get the total size including only original sizes and ignoring archival sizes for tabular files
+        int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize;
+
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular));
+
+        // Get the total size including both the original and archival tabular file sizes
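+        // The archival (ingested) size of the tabular file is derived from the Archival-mode total obtained above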
+        int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize;
+        int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedTextFilesStorageSize;
+
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes));
+
+        // Get the total size sending an invalid file download size mode
+        String invalidMode = "invalidMode";
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode())
+                .body("message", equalTo("Invalid mode: " + invalidMode));
+
+        // Upload second test tabular file (same source as before)
+        uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Ensure tabular file is ingested
+        Thread.sleep(2000);
+
+        // Get the total size including only original sizes and ignoring archival sizes for tabular files
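+        // The second copy of test.tab adds another original of the same size to the expected total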
+        expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular;
+
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular));
+
+        // Get the total size including both the original and archival tabular file sizes
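+        // The second tabular file contributes one more original and one more archival copy of the same sizes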
+        expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes;
+
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken);
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes));
+
+        // Get the total size including both the original and archival tabular file sizes with search criteria
+        getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken);
+        // We exclude tabular sizes from the expected result since the search criteria filters by content type "text/plain" and search text "test_"
+        int expectedSizeIncludingAllSizesAndApplyingCriteria = testFileSize1 + testFileSize2;
+        getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.storageSize", equalTo(expectedSizeIncludingAllSizesAndApplyingCriteria));
+
+        // Test Deaccessioned
+        Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
+        deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // includeDeaccessioned false
+        Response getDownloadSizeResponseNoDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken);
+        getDownloadSizeResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // includeDeaccessioned true
+        Response getDownloadSizeResponseDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, apiToken);
+        getDownloadSizeResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Test that the download size for a deaccessioned dataset cannot be retrieved by a guest
+        // By latest published version
+        Response getDownloadSizeGuestUserResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null);
+        getDownloadSizeGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+        // By specific version 1.0
+        getDownloadSizeGuestUserResponse = UtilIT.getDownloadSize(datasetId, "1.0", null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null);
+        getDownloadSizeGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+    }
+
+    @Test
+    public void testGetUserPermissionsOnDataset() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Call with valid dataset id
+        Response getUserPermissionsOnDatasetResponse = UtilIT.getUserPermissionsOnDataset(Integer.toString(datasetId), apiToken);
+        getUserPermissionsOnDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+        boolean canViewUnpublishedDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canViewUnpublishedDataset");
+        assertTrue(canViewUnpublishedDataset);
+        boolean canEditDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canEditDataset");
+        assertTrue(canEditDataset);
+        boolean canPublishDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canPublishDataset");
+        assertTrue(canPublishDataset);
+        boolean canManageDatasetPermissions = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canManageDatasetPermissions");
+        assertTrue(canManageDatasetPermissions);
+        boolean canDeleteDatasetDraft = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canDeleteDatasetDraft");
+        assertTrue(canDeleteDatasetDraft);
+
+        // Call with invalid dataset id
+        Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getUserPermissionsOnDataset("testInvalidId", apiToken);
+        getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+    }
+    
+    //Requires that a Globus remote store be set up with the parameters in the GlobusOverlayAccessIOTest class
+    //Tests whether the API call succeeds and returns some of the expected parameters
+    @Test
+    @Disabled
+    public void testGetGlobusUploadParameters() {
+        //Creates managed and remote Globus stores
+        GlobusOverlayAccessIOTest.setUp();
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String username = UtilIT.getUsernameFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+        
+        Response makeSuperUser = UtilIT.makeSuperUser(username);
+        assertEquals(200, makeSuperUser.getStatusCode());
+        
+        Response setDriver = UtilIT.setDatasetStorageDriver(datasetId, System.getProperty("dataverse.files.globusr.label"), apiToken);
+        assertEquals(200, setDriver.getStatusCode());
+        
+        Response getUploadParams = UtilIT.getDatasetGlobusUploadParameters(datasetId, "en_us", apiToken);
+        assertEquals(200, getUploadParams.getStatusCode());
+        JsonObject data = JsonUtil.getJsonObject(getUploadParams.getBody().asString());
+        JsonObject queryParams = data.getJsonObject("queryParameters");
+        assertEquals("en_us", queryParams.getString("dvLocale"));
+        assertEquals("false", queryParams.getString("managed"));
+        //Assumes only one reference endpoint with a basepath is configured
+        assertTrue(queryParams.getJsonArray("referenceEndpointsWithPaths").get(0).toString().indexOf(System.getProperty("dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS)) > -1);
+        JsonArray signedUrls = data.getJsonArray("signedUrls");
+        boolean found = false;
+        for (int i = 0; i < signedUrls.size(); i++) {
+            JsonObject signedUrl = signedUrls.getJsonObject(i);
+            if (signedUrl.getString("name").equals("requestGlobusReferencePaths")) {
+                found=true;
+                break;
+            }
+        }
+        assertTrue(found);
+        //Removes managed and remote Globus stores
+        GlobusOverlayAccessIOTest.tearDown();
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java
index fded590d9db..58aa366c9e5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsTest.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class DatasetsTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 75215bb700e..78ece6ecc42 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -13,27 +13,28 @@
 import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.List;
-import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import javax.ws.rs.core.Response.Status;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.core.Response.Status;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import java.nio.file.Files;
-import com.jayway.restassured.path.json.JsonPath;
-import static javax.ws.rs.core.Response.Status.OK;
+import io.restassured.path.json.JsonPath;
 import org.hamcrest.CoreMatchers;
 import org.hamcrest.Matchers;
 
@@ -41,12 +42,12 @@ public class DataversesIT {
 
     private static final Logger logger = Logger.getLogger(DataversesIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
     
-    @AfterClass
+    @AfterAll
     public static void afterClass() {
         Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport);
     }
@@ -145,6 +146,44 @@ public void testMinimalDataverse() throws FileNotFoundException {
         deleteDataverse.then().assertThat().statusCode(OK.getStatusCode());
     }
 
+    /**
+     * A regular user can create a Dataverse Collection and access its
+     * GuestbookResponses by DV alias or ID.
+     * A request for a non-existent Dataverse's GuestbookResponses returns
+     * Not Found.
+     * A regular user cannot access the guestbook responses for a Dataverse
+     * that they do not have permissions for, like the root Dataverse.
+     */
+    @Test
+    public void testGetGuestbookResponses() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response create = UtilIT.createRandomDataverse(apiToken);
+        create.prettyPrint();
+        create.then().assertThat().statusCode(CREATED.getStatusCode());
+        String alias = UtilIT.getAliasFromResponse(create);
+        Integer dvId = UtilIT.getDataverseIdFromResponse(create);
+
+        logger.info("Request guestbook responses for non-existent Dataverse");
+        Response getResponsesByBadAlias = UtilIT.getGuestbookResponses("-1", null, apiToken);
+        getResponsesByBadAlias.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        logger.info("Request guestbook responses for existent Dataverse by alias");
+        Response getResponsesByAlias = UtilIT.getGuestbookResponses(alias, null, apiToken);
+        getResponsesByAlias.then().assertThat().statusCode(OK.getStatusCode());
+
+        logger.info("Request guestbook responses for existent Dataverse by ID");
+        Response getResponsesById = UtilIT.getGuestbookResponses(dvId.toString(), null, apiToken);
+        getResponsesById.then().assertThat().statusCode(OK.getStatusCode());
+
+        logger.info("Request guestbook responses for root Dataverse by alias");
+        getResponsesById = UtilIT.getGuestbookResponses("root", null, apiToken);
+        getResponsesById.prettyPrint();
+        getResponsesById.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
+    }
+
     @Test
     public void testNotEnoughJson() {
         Response createUser = UtilIT.createRandomUser();
@@ -373,7 +412,7 @@ public void testMoveDataverse() {
         while (checkIndex) {
             try {   
                     try {
-                        Thread.sleep(2000);
+                        Thread.sleep(4000);
                     } catch (InterruptedException ex) {
                     }                
                 Response search = UtilIT.search("id:dataverse_" + dataverseId + "&subtree=" + dataverseAlias2, apiToken);
@@ -510,6 +549,13 @@ public void testImportDDI() throws IOException, InterruptedException {
         logger.info(importDDI.prettyPrint());
         assertEquals(201, importDDI.getStatusCode());
 
+        // Under normal conditions, you shouldn't need to destroy these datasets.
+        // Uncomment if they're still around from a previous failed run.
+//        Response destroy1 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD11", apiToken);
+//        destroy1.prettyPrint();
+//        Response destroy2 = UtilIT.destroyDataset("doi:10.5072/FK2/ABCD22", apiToken);
+//        destroy2.prettyPrint();
+
         Response importDDIPid = UtilIT.importDatasetDDIViaNativeApi(apiToken, dataverseAlias, xml,  "doi:10.5072/FK2/ABCD11", "no");
         logger.info(importDDIPid.prettyPrint());
         assertEquals(201, importDDIPid.getStatusCode());
@@ -561,13 +607,9 @@ public void testImportDDI() throws IOException, InterruptedException {
         Integer datasetIdIntPidRel = JsonPath.from(importDDIPidRel.body().asString()).getInt("data.id");
         Response destroyDatasetResponsePidRel = UtilIT.destroyDataset(datasetIdIntPidRel, apiToken);
         assertEquals(200, destroyDatasetResponsePidRel.getStatusCode());
-
-        // This last dataset we have just imported, let's give it a sec. to finish indexing (?)
-        // or whatever it is that may still be happening. (Have been seeing intermittent 500 from the next
-        // destroyDataset() line lately)
-        
-        Thread.sleep(1000L); 
         
+        UtilIT.sleepForDeadlock(UtilIT.MAXIMUM_IMPORT_DURATION);
+
         Integer datasetIdIntRelease = JsonPath.from(importDDIRelease.body().asString()).getInt("data.id");
         Response destroyDatasetResponseRelease = UtilIT.destroyDataset(datasetIdIntRelease, apiToken);
         assertEquals(200, destroyDatasetResponseRelease.getStatusCode());
@@ -579,4 +621,47 @@ public void testImportDDI() throws IOException, InterruptedException {
         assertEquals(200, deleteUserResponse.getStatusCode());
     }
     
+    @Test
+    public void testAttributesApi() throws Exception {
+
+        Response createUser = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        if (createDataverseResponse.getStatusCode() != 201) {
+            System.out.println("A workspace for testing (a dataverse) couldn't be created in the root dataverse. The output was:\n\n" + createDataverseResponse.body().asString());
+            System.out.println("\nPlease ensure that users can created dataverses in the root in order for this test to run.");
+        } else {
+            createDataverseResponse.prettyPrint();
+        }
+        assertEquals(201, createDataverseResponse.getStatusCode());
+
+        String collectionAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+        String newCollectionAlias = collectionAlias + "RENAMED";
+        
+        // Change the alias of the collection: 
+        
+        Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "alias", newCollectionAlias, apiToken);
+        changeAttributeResp.prettyPrint();
+        
+        changeAttributeResp.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("message.message", equalTo("Update successful"));
+        
+        // Check on the collection, under the new alias: 
+        
+        Response collectionInfoResponse = UtilIT.exportDataverse(newCollectionAlias, apiToken);
+        collectionInfoResponse.prettyPrint();
+        
+        collectionInfoResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.alias", equalTo(newCollectionAlias));
+        
+        // Delete the collection (again, using its new alias):
+        
+        Response deleteCollectionResponse = UtilIT.deleteDataverse(newCollectionAlias, apiToken);
+        deleteCollectionResponse.prettyPrint();
+        assertEquals(OK.getStatusCode(), deleteCollectionResponse.getStatusCode());
+    }
+    
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java
index 10113110b66..512b07912dd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesTest.java
@@ -12,6 +12,7 @@
 import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO;
 import edu.harvard.iq.dataverse.api.imports.ImportServiceBean;
 import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
 import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlockFacetsCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.UpdateMetadataBlockFacetRootCommand;
@@ -21,17 +22,19 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.ArgumentCaptor;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
 
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.Response;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.Response;
+import org.mockito.junit.jupiter.MockitoExtension;
+
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -41,7 +44,7 @@
  *
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class DataversesTest {
     // From AbstractApiBean class
     @Mock
@@ -68,13 +71,15 @@ public class DataversesTest {
     private DataverseServiceBean dataverseService;
     @Mock
     private SwordServiceBean swordService;
+    @Mock
+    private ContainerRequestContext containerRequestContext;
 
     @InjectMocks
     private Dataverses target;
 
     private Dataverse VALID_DATAVERSE;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         VALID_DATAVERSE = new Dataverse();
         VALID_DATAVERSE.setId(MocksFactory.nextId());
@@ -84,13 +89,14 @@ public void beforeEachTest() {
         Mockito.lenient().when(dataverseService.findByAlias(VALID_DATAVERSE.getAlias())).thenReturn(VALID_DATAVERSE);
         Mockito.lenient().when(httpRequest.getHeader("X-Dataverse-key")).thenReturn(UUID.randomUUID().toString());
         Mockito.lenient().when(privateUrlSvc.getPrivateUrlUserFromToken(Mockito.anyString())).thenReturn(new PrivateUrlUser(0));
+        Mockito.lenient().when(containerRequestContext.getProperty(ApiConstants.CONTAINER_REQUEST_CONTEXT_USER)).thenReturn(new AuthenticatedUser());
     }
 
     @Test
     public void listMetadataBlockFacets_should_return_404_when_dataverse_is_not_found() {
         String dataverseAlias = UUID.randomUUID().toString();
         Mockito.when(dataverseService.findByAlias(dataverseAlias)).thenReturn(null);
-        Response result = target.listMetadataBlockFacets(dataverseAlias);
+        Response result = target.listMetadataBlockFacets(containerRequestContext, dataverseAlias);
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(404));
         Mockito.verifyNoMoreInteractions(engineSvc);
@@ -106,7 +112,7 @@ public void listMetadataBlockFacets_should_return_the_list_of_metadataBlockFacet
         dataverseMetadataBlockFacet.setMetadataBlock(metadataBlock);
         Mockito.when(engineSvc.submit(Mockito.any(ListMetadataBlockFacetsCommand.class))).thenReturn(Arrays.asList(dataverseMetadataBlockFacet));
 
-        Response response = target.listMetadataBlockFacets(VALID_DATAVERSE.getAlias());
+        Response response = target.listMetadataBlockFacets(containerRequestContext, VALID_DATAVERSE.getAlias());
 
         MatcherAssert.assertThat(response.getStatus(), Matchers.is(200));
         MatcherAssert.assertThat(response.getEntity(), Matchers.notNullValue());
@@ -125,7 +131,7 @@ public void listMetadataBlockFacets_should_return_the_list_of_metadataBlockFacet
     public void listMetadataBlockFacets_should_return_empty_list_when_metadata_block_facet_is_null() throws Exception{
         Mockito.when(engineSvc.submit(Mockito.any(ListMetadataBlockFacetsCommand.class))).thenReturn(null);
 
-        Response response = target.listMetadataBlockFacets(VALID_DATAVERSE.getAlias());
+        Response response = target.listMetadataBlockFacets(containerRequestContext, VALID_DATAVERSE.getAlias());
 
         MatcherAssert.assertThat(response.getStatus(), Matchers.is(200));
         DataverseMetadataBlockFacetDTO result = (DataverseMetadataBlockFacetDTO)response.getEntity();
@@ -141,7 +147,7 @@ public void listMetadataBlockFacets_should_return_empty_list_when_metadata_block
     public void setMetadataBlockFacets_should_return_404_when_dataverse_is_not_found() {
         String dataverseAlias = UUID.randomUUID().toString();
         Mockito.when(dataverseService.findByAlias(dataverseAlias)).thenReturn(null);
-        Response result = target.setMetadataBlockFacets(dataverseAlias, Collections.emptyList());
+        Response result = target.setMetadataBlockFacets(containerRequestContext, dataverseAlias, Collections.emptyList());
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(404));
         Mockito.verifyNoMoreInteractions(engineSvc);
@@ -154,7 +160,7 @@ public void setMetadataBlockFacets_should_return_400_when_dataverse_has_metadata
         Mockito.when(dataverse.isMetadataBlockFacetRoot()).thenReturn(false);
         Mockito.when(dataverseService.findByAlias(dataverseAlias)).thenReturn(dataverse);
 
-        Response result = target.setMetadataBlockFacets(dataverseAlias, Collections.emptyList());
+        Response result = target.setMetadataBlockFacets(containerRequestContext, dataverseAlias, Collections.emptyList());
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(400));
         Mockito.verifyNoMoreInteractions(engineSvc);
@@ -165,7 +171,7 @@ public void setMetadataBlockFacets_should_return_400_when_invalid_metadata_block
         Mockito.when(metadataBlockSvc.findByName("valid_block")).thenReturn(new MetadataBlock());
         Mockito.when(metadataBlockSvc.findByName("invalid_block")).thenReturn(null);
         List<String> metadataBlocks = Arrays.asList("valid_block", "invalid_block");
-        Response result = target.setMetadataBlockFacets(VALID_DATAVERSE.getAlias(), metadataBlocks);
+        Response result = target.setMetadataBlockFacets(containerRequestContext, VALID_DATAVERSE.getAlias(), metadataBlocks);
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(400));
         Mockito.verifyNoMoreInteractions(engineSvc);
@@ -176,7 +182,7 @@ public void setMetadataBlockFacets_should_return_200_when_update_is_successful()
         MetadataBlock validBlock = new MetadataBlock();
         Mockito.when(metadataBlockSvc.findByName("valid_block")).thenReturn(validBlock);
         List<String> metadataBlocks = Arrays.asList("valid_block");
-        Response result = target.setMetadataBlockFacets(VALID_DATAVERSE.getAlias(), metadataBlocks);
+        Response result = target.setMetadataBlockFacets(containerRequestContext, VALID_DATAVERSE.getAlias(), metadataBlocks);
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(200));
         ArgumentCaptor<UpdateMetadataBlockFacetsCommand> updateCommand = ArgumentCaptor.forClass(UpdateMetadataBlockFacetsCommand.class);
@@ -190,7 +196,7 @@ public void setMetadataBlockFacets_should_return_200_when_update_is_successful()
 
     @Test
     public void setMetadataBlockFacets_should_support_empty_metadatablock_list() throws Exception{
-        Response result = target.setMetadataBlockFacets(VALID_DATAVERSE.getAlias(), Collections.emptyList());
+        Response result = target.setMetadataBlockFacets(containerRequestContext, VALID_DATAVERSE.getAlias(), Collections.emptyList());
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(200));
         Mockito.verify(engineSvc).submit(Mockito.any(UpdateMetadataBlockFacetsCommand.class));
@@ -200,7 +206,7 @@ public void setMetadataBlockFacets_should_support_empty_metadatablock_list() thr
     public void updateMetadataBlockFacetsRoot_should_return_404_when_dataverse_is_not_found() {
         String dataverseAlias = UUID.randomUUID().toString();
         Mockito.when(dataverseService.findByAlias(dataverseAlias)).thenReturn(null);
-        Response result = target.updateMetadataBlockFacetsRoot(dataverseAlias, "true");
+        Response result = target.updateMetadataBlockFacetsRoot(containerRequestContext, dataverseAlias, "true");
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(404));
         Mockito.verifyNoMoreInteractions(engineSvc);
@@ -208,7 +214,7 @@ public void updateMetadataBlockFacetsRoot_should_return_404_when_dataverse_is_no
 
     @Test
     public void updateMetadataBlockFacetsRoot_should_return_400_when_invalid_boolean() throws Exception{
-        Response result = target.updateMetadataBlockFacetsRoot(VALID_DATAVERSE.getAlias(), "invalid");
+        Response result = target.updateMetadataBlockFacetsRoot(containerRequestContext, VALID_DATAVERSE.getAlias(), "invalid");
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(400));
         Mockito.verifyNoMoreInteractions(engineSvc);
@@ -217,16 +223,16 @@ public void updateMetadataBlockFacetsRoot_should_return_400_when_invalid_boolean
     @Test
     public void updateMetadataBlockFacetsRoot_should_return_200_and_make_no_update_when_dataverse_is_found_and_facet_root_has_not_changed() {
         // VALID_DATAVERSE.metadataBlockFacetRoot is true
-        Response result = target.updateMetadataBlockFacetsRoot(VALID_DATAVERSE.getAlias(), "true");
+        Response result = target.updateMetadataBlockFacetsRoot(containerRequestContext, VALID_DATAVERSE.getAlias(), "true");
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(200));
-        Mockito.verifyZeroInteractions(engineSvc);
+        Mockito.verifyNoInteractions(engineSvc);
     }
 
     @Test
     public void updateMetadataBlockFacetsRoot_should_return_200_and_execute_command_when_dataverse_is_found_and_facet_root_has_changed() throws Exception {
         // VALID_DATAVERSE.metadataBlockFacetRoot is true
-        Response result = target.updateMetadataBlockFacetsRoot(VALID_DATAVERSE.getAlias(), "false");
+        Response result = target.updateMetadataBlockFacetsRoot(containerRequestContext, VALID_DATAVERSE.getAlias(), "false");
 
         MatcherAssert.assertThat(result.getStatus(), Matchers.is(200));
         ArgumentCaptor<UpdateMetadataBlockFacetRootCommand> updateRootCommand = ArgumentCaptor.forClass(UpdateMetadataBlockFacetRootCommand.class);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java
index de2a1d422c0..2b29de3e447 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DeactivateUsersIT.java
@@ -1,25 +1,25 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.startsWith;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class DeactivateUsersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -104,7 +104,7 @@ public void testDeactivateUser() {
 
         Response getUserDeactivated = UtilIT.getAuthenticatedUserByToken(apiToken);
         getUserDeactivated.prettyPrint();
-        getUserDeactivated.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+        getUserDeactivated.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode());
 
         Response userTracesAfterDeactivate = UtilIT.getUserTraces(username, superuserApiToken);
         userTracesAfterDeactivate.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java
index cae1d0e210a..13f48f9b854 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DeleteUsersIT.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.io.File;
 import java.io.IOException;
@@ -11,16 +11,16 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * The following query has been helpful in discovering places where user ids
@@ -130,7 +130,7 @@
  */
 public class DeleteUsersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java
index 7d5adf95507..927efb0b142 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java
@@ -1,9 +1,9 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Headers;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.http.Headers;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -16,20 +16,23 @@
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 import java.util.zip.ZipOutputStream;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT;
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST_PUBLISHED;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class DownloadFilesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -90,7 +93,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // Note that a MANIFEST.TXT file is added.
         HashSet<String> expectedFiles1 = new HashSet<>(Arrays.asList("MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles1, filenamesFound1);
+        assertEquals(expectedFiles1, filenamesFound1);
 
         // A guest user can't download unpublished files.
         // (a guest user cannot even see that the draft version actually exists;
@@ -130,7 +133,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // The creator gets the draft version with an extra file.
         HashSet<String> expectedFiles2 = new HashSet<>(Arrays.asList("LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles2, filenamesFound2);
+        assertEquals(expectedFiles2, filenamesFound2);
 
         Response downloadFiles5 = UtilIT.downloadFiles(datasetPid, null);
         downloadFiles5.then().assertThat()
@@ -140,7 +143,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // A guest user gets the 1.0 version with only 3 files.
         HashSet<String> expectedFiles3 = new HashSet<>(Arrays.asList("MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles3, filenamesFound3);
+        assertEquals(expectedFiles3, filenamesFound3);
 
         // Publishing version 2.0
         UtilIT.publishDatasetViaNativeApi(datasetPid, "major", apiToken)
@@ -154,7 +157,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // By not specifying a version, the creator gets the latest version. In this case, 2.0 (published) with 4 files.
         HashSet<String> expectedFiles4 = new HashSet<>(Arrays.asList("LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles4, filenamesFound4);
+        assertEquals(expectedFiles4, filenamesFound4);
 
         String datasetVersion = "1.0";
         Response downloadFiles7 = UtilIT.downloadFiles(datasetPid, datasetVersion, apiToken);
@@ -165,7 +168,7 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // Creator specifies the 1.0 version and gets the expected 3 files.
         HashSet<String> expectedFiles5 = new HashSet<>(Arrays.asList("MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles5, filenamesFound5);
+        assertEquals(expectedFiles5, filenamesFound5);
 
         // Add Code of Conduct file
         Path pathtoCocFile = Paths.get(Files.createTempDirectory(null) + File.separator + "CODE_OF_CONDUCT.md");
@@ -186,10 +189,9 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // If the creator doesn't specify a version, they get the latest draft with 5 files.
         HashSet<String> expectedFiles6 = new HashSet<>(Arrays.asList("CODE_OF_CONDUCT.md", "LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles6, filenamesFound6);
+        assertEquals(expectedFiles6, filenamesFound6);
 
-        String datasetVersionLatestPublished = ":latest-published";
-        Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, datasetVersionLatestPublished, apiToken);
+        Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken);
         downloadFiles9.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
@@ -197,11 +199,10 @@ public void downloadAllFilesByVersion() throws IOException {
 
         // The contributor requested "latest published" and got version 3 with 4 files.
         HashSet<String> expectedFiles7 = new HashSet<>(Arrays.asList("LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md"));
-        Assert.assertEquals(expectedFiles7, filenamesFound7);
+        assertEquals(expectedFiles7, filenamesFound7);
 
         // Guests cannot download draft versions.
-        String datasetVersionDraft = ":draft";
-        Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, datasetVersionDraft, null);
+        Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, DS_VERSION_DRAFT, null);
         downloadFiles10.prettyPrint();
         downloadFiles10.then().assertThat()
                 .statusCode(UNAUTHORIZED.getStatusCode())
@@ -266,14 +267,14 @@ public void downloadAllFilesRestricted() throws IOException {
                 .statusCode(OK.getStatusCode());
 
         // The creator can download a restricted file from a draft.
-        Assert.assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
 
         Response downloadFiles2 = UtilIT.downloadFiles(datasetPid, apiToken);
         downloadFiles2.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // The creator can download a restricted file and an unrestricted file from a draft.
-        Assert.assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("secrets.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
 
         UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken)
                 .then().assertThat().statusCode(OK.getStatusCode());
@@ -307,14 +308,14 @@ public void downloadAllFilesRestricted() throws IOException {
                 .statusCode(OK.getStatusCode());
 
         // The guest can only get the unrestricted file (and the manifest).
-        Assert.assertEquals(new HashSet<>(Arrays.asList("README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles4.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles4.getBody().asInputStream()));
 
         Response downloadFiles5 = UtilIT.downloadFiles(datasetPid, apiToken);
         downloadFiles5.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // The creator can download both files (and the manifest).
-        Assert.assertEquals(new HashSet<>(Arrays.asList("secrets.md", "README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles5.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("secrets.md", "README.md", "MANIFEST.TXT")), gatherFilenames(downloadFiles5.getBody().asInputStream()));
 
     }
 
@@ -356,21 +357,21 @@ public void downloadAllFilesTabular() throws IOException {
                 .body("data.files[0].label", equalTo("50by1000.dta"));
 
         // UtilIT.MAXIMUM_INGEST_LOCK_DURATION is 3 (seconds), which is not long enough here, so we wait a few seconds longer.
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION + 3));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION + 3), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response downloadFiles1 = UtilIT.downloadFiles(datasetPid, apiToken);
         downloadFiles1.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // By default we get the archival version (.tab).
-        Assert.assertEquals(new HashSet<>(Arrays.asList("50by1000.tab", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("50by1000.tab", "MANIFEST.TXT")), gatherFilenames(downloadFiles1.getBody().asInputStream()));
 
         Response downloadFiles2 = UtilIT.downloadFiles(datasetPid, UtilIT.DownloadFormat.original, apiToken);
         downloadFiles2.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         // By passing format=original we get the original version, Stata (.dta) in this case.
-        Assert.assertEquals(new HashSet<>(Arrays.asList("50by1000.dta", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
+        assertEquals(new HashSet<>(Arrays.asList("50by1000.dta", "MANIFEST.TXT")), gatherFilenames(downloadFiles2.getBody().asInputStream()));
     }
 
     /**
@@ -437,8 +438,8 @@ public void downloadFilenameUtf8() throws IOException {
                 .statusCode(OK.getStatusCode());
         Headers headers = downloadFile.getHeaders();
         // In "MY READ–ME.md" below the space is %20 and the en-dash ("–") is "%E2%80%93" (e2 80 93 in hex).
-        Assert.assertEquals("attachment; filename=\"MY%20READ%E2%80%93ME.md\"", headers.getValue("Content-disposition"));
-        Assert.assertEquals("text/markdown; name=\"MY%20READ%E2%80%93ME.md\";charset=UTF-8", headers.getValue("Content-Type"));
+        assertEquals("attachment; filename=\"MY%20READ%E2%80%93ME.md\"", headers.getValue("Content-disposition"));
+        assertEquals("text/markdown; name=\"MY%20READ%E2%80%93ME.md\";charset=UTF-8", headers.getValue("Content-Type"));
 
         // Download all files as a zip and assert "MY READ–ME.md" has an en-dash.
         Response downloadFiles = UtilIT.downloadFiles(datasetPid, apiToken);
@@ -450,7 +451,7 @@ public void downloadFilenameUtf8() throws IOException {
         // Note that a MANIFEST.TXT file is added.
         // "MY READ–ME.md" (with an en-dash) is correctly extracted from the downloaded zip
         HashSet<String> expectedFiles = new HashSet<>(Arrays.asList("MANIFEST.TXT", "MY READ–ME.md"));
-        Assert.assertEquals(expectedFiles, filenamesFound);
+        assertEquals(expectedFiles, filenamesFound);
     }
 
     private HashSet<String> gatherFilenames(InputStream inputStream) throws IOException {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java
index 6de52951077..f7f61d18a51 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriterTest.java
@@ -2,16 +2,16 @@
 
 import edu.harvard.iq.dataverse.dataaccess.Range;
 import java.util.List;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import org.junit.Before;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class DownloadInstanceWriterTest {
 
     DownloadInstanceWriter diw;
 
-    @Before
+    @BeforeEach
     public void setUpClass() {
         diw = new DownloadInstanceWriter();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java
index 6227e96fdfa..32b949fad25 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DuplicateFilesIT.java
@@ -1,23 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.parsing.Parser;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.parsing.Parser;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.nullValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * Make assertions about duplicate file names (and maybe in the future,
@@ -25,7 +25,7 @@
  */
 public class DuplicateFilesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java
index 1775649c3dd..3b690163a39 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/EditDDIIT.java
@@ -1,18 +1,16 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
-import static edu.harvard.iq.dataverse.api.AccessIT.datasetId;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 
 
 import edu.harvard.iq.dataverse.datavariable.VarGroup;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadata;
 import edu.harvard.iq.dataverse.datavariable.VariableMetadataDDIParser;
 
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 
 import java.nio.file.Paths;
@@ -28,20 +26,21 @@
 import javax.xml.stream.XMLStreamException;
 
 
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.OK;
 
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
 import java.nio.file.Files;
-import static org.junit.Assert.assertTrue;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 
 public class EditDDIIT {
 
     private static final Logger logger = Logger.getLogger(EditDDIIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -81,7 +80,7 @@ public void testUpdateVariableMetadata() throws InterruptedException {
 
         // Give file time to ingest
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
         
         Response origXml = UtilIT.getFileMetadata(origFileId, null, apiToken);
         assertEquals(200, origXml.getStatusCode());
@@ -150,6 +149,7 @@ public void testUpdateVariableMetadata() throws InterruptedException {
         assertEquals(200, publishDataset.getStatusCode());
 
         Response editDDIResponseNewDraft = UtilIT.editDDI(stringOrigXml, origFileId, apiToken);
+        editDDIResponseNewDraft.prettyPrint();
         assertEquals(200, editDDIResponseNewDraft.getStatusCode());
 
         //not authorized
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
index cdebeddb7bc..022747a3cdc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ExternalToolsIT.java
@@ -1,31 +1,32 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.io.StringReader;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
 import org.hamcrest.Matchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
-import org.junit.Ignore;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Disabled;
 
 public class ExternalToolsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -89,7 +90,7 @@ public void testFileLevelTool1() {
         uploadTabularFile.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToTabularFile, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToTabularFile);
         Integer tabularFileId = JsonPath.from(uploadTabularFile.getBody().asString()).getInt("data.files[0].dataFile.id");
 
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -127,7 +128,7 @@ public void testFileLevelTool1() {
         getExternalToolsForFileInvalidType.prettyPrint();
         getExternalToolsForFileInvalidType.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode())
-                .body("message", CoreMatchers.equalTo("Type must be one of these values: [explore, configure, preview]."));
+                .body("message", CoreMatchers.equalTo("Type must be one of these values: [explore, configure, preview, query]."));
 
         Response getExternalToolsForTabularFiles = UtilIT.getExternalToolsForFile(tabularFileId.toString(), "explore", apiToken);
         getExternalToolsForTabularFiles.prettyPrint();
@@ -223,7 +224,7 @@ public void testDatasetLevelTool1() {
         getExternalToolsByDatasetIdInvalidType.prettyPrint();
         getExternalToolsByDatasetIdInvalidType.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode())
-                .body("message", CoreMatchers.equalTo("Type must be one of these values: [explore, configure, preview]."));
+                .body("message", CoreMatchers.equalTo("Type must be one of these values: [explore, configure, preview, query]."));
 
         Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "explore", apiToken);
         getExternalToolsByDatasetId.prettyPrint();
@@ -235,6 +236,84 @@ public void testDatasetLevelTool1() {
 
     }
 
+    @Test
+    public void testDatasetLevelToolConfigure() {
+
+        // Delete all external tools before testing.
+        Response getTools = UtilIT.getExternalTools();
+        getTools.prettyPrint();
+        getTools.then().assertThat()
+                .statusCode(OK.getStatusCode());
+        String body = getTools.getBody().asString();
+        JsonReader bodyObject = Json.createReader(new StringReader(body));
+        JsonArray tools = bodyObject.readObject().getJsonArray("data");
+        for (int i = 0; i < tools.size(); i++) {
+            JsonObject tool = tools.getJsonObject(i);
+            int id = tool.getInt("id");
+            Response deleteExternalTool = UtilIT.deleteExternalTool(id);
+            deleteExternalTool.prettyPrint();
+        }
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.prettyPrint();
+        createUser.then().assertThat()
+                .statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        createDataverseResponse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = JsonPath.from(createDataset.getBody().asString()).getInt("data.id");
+        String datasetPid = JsonPath.from(createDataset.getBody().asString()).getString("data.persistentId");
+
+        String toolManifest = """
+{
+   "displayName": "Dataset Configurator",
+   "description": "Slices! Dices! <a href='https://docs.datasetconfigurator.com' target='_blank'>More info</a>.",
+   "types": [
+     "configure"
+   ],
+   "scope": "dataset",
+   "toolUrl": "https://datasetconfigurator.com",
+   "toolParameters": {
+     "queryParameters": [
+       {
+         "datasetPid": "{datasetPid}"
+       },
+       {
+         "localeCode": "{localeCode}"
+       }
+     ]
+   }
+ }
+""";
+
+        Response addExternalTool = UtilIT.addExternalTool(JsonUtil.getJsonObject(toolManifest));
+        addExternalTool.prettyPrint();
+        addExternalTool.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.displayName", CoreMatchers.equalTo("Dataset Configurator"));
+
+        Response getExternalToolsByDatasetId = UtilIT.getExternalToolsForDataset(datasetId.toString(), "configure", apiToken);
+        getExternalToolsByDatasetId.prettyPrint();
+        getExternalToolsByDatasetId.then().assertThat()
+                .body("data[0].displayName", CoreMatchers.equalTo("Dataset Configurator"))
+                .body("data[0].scope", CoreMatchers.equalTo("dataset"))
+                .body("data[0].types[0]", CoreMatchers.equalTo("configure"))
+                .body("data[0].toolUrlWithQueryParams", CoreMatchers.equalTo("https://datasetconfigurator.com?datasetPid=" + datasetPid))
+                .statusCode(OK.getStatusCode());
+
+    }
+
     @Test
     public void testAddFilelToolNoFileId() throws IOException {
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -309,7 +388,7 @@ public void testAddExternalToolNonReservedWord() throws IOException {
                 .statusCode(BAD_REQUEST.getStatusCode());
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void deleteTools() {
 
@@ -330,7 +409,7 @@ public void deleteTools() {
     }
 
     // preview only
-    @Ignore
+    @Disabled
     @Test
     public void createToolShellScript() {
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -370,7 +449,7 @@ public void createToolShellScript() {
     }
 
     // explore only
-    @Ignore
+    @Disabled
     @Test
     public void createToolDataExplorer() {
         JsonObjectBuilder job = Json.createObjectBuilder();
@@ -403,7 +482,7 @@ public void createToolDataExplorer() {
     }
 
     // both preview and explore
-    @Ignore
+    @Disabled
     @Test
     public void createToolSpreadsheetViewer() {
         JsonObjectBuilder job = Json.createObjectBuilder();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java
index bfe4a58abbf..220d386e28e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FeedbackApiIT.java
@@ -1,19 +1,19 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class FeedbackApiIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java
index 1ca7c99be8e..619a7594244 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FileMetadataIT.java
@@ -1,22 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+import io.restassured.response.Response;
 import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.fail;
+import static io.restassured.RestAssured.given;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class FileMetadataIT {
 
@@ -32,12 +33,12 @@ public class FileMetadataIT {
     private static int dsId;
     private static int dsIdFirst;
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
-    @Before
+    @BeforeEach
     public void setUpDataverse() {
         try {
             // create random test name
@@ -79,17 +80,16 @@ public void setUpDataverse() {
                     .then().assertThat().statusCode(201);
             System.out.println("DATAVERSE: " + RestAssured.baseURI + "/dataverse/" + testName);
         } catch (Exception e) {
-            System.out.println("Error setting up test dataverse: " + e.getMessage());
-            fail();
+            fail("Error setting up test dataverse: " + e.getMessage(), e);
         }
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         RestAssured.reset();
     }
 
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         try {
             // delete dataset
@@ -165,9 +165,7 @@ public void testJsonParserWithDirectoryLabels() {
                     .statusCode(200);
 
         } catch (Exception e) {
-            System.out.println("Error testJsonParserWithDirectoryLabels: " + e.getMessage());
-            e.printStackTrace();
-            fail();
+            fail("Error testJsonParserWithDirectoryLabels: " + e.getMessage(), e);
         }
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java
index 10ac9192205..989688b0af6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FileTypeDetectionIT.java
@@ -1,21 +1,21 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.nullValue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class FileTypeDetectionIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
index a373ee694c2..915f82a6de2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java
@@ -1,49 +1,62 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+
+import java.util.List;
 import java.util.logging.Logger;
 
-import org.junit.Test;
-import org.junit.BeforeClass;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.path.xml.XmlPath;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
+import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
+import io.restassured.path.json.JsonPath;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.path.xml.XmlPath;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.File;
 import java.io.IOException;
+
 import static java.lang.Thread.sleep;
-import java.math.BigDecimal;
+
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.text.MessageFormat;
 import java.util.Arrays;
 import java.util.Collections;
-import java.util.ResourceBundle;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
+import java.util.Map;
 
-import static javax.ws.rs.core.Response.Status.*;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+
+import static jakarta.ws.rs.core.Response.Status.*;
 import org.hamcrest.CoreMatchers;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.startsWith;
-import static org.hamcrest.CoreMatchers.nullValue;
 import org.hamcrest.Matchers;
+import org.junit.jupiter.api.AfterAll;
 
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.CoreMatchers.hasItem;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class FilesIT {
 
     private static final Logger logger = Logger.getLogger(FilesIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+
+        Response removePublicInstall = UtilIT.deleteSetting(SettingsServiceBean.Key.PublicInstall);
+        removePublicInstall.then().assertThat().statusCode(200);
+
+    }
+
+    @AfterAll
+    public static void tearDownClass() {
+        UtilIT.deleteSetting(SettingsServiceBean.Key.PublicInstall);
     }
 
     /**
@@ -62,7 +75,7 @@ private String createUserGetToken(){
         
         String username = UtilIT.getUsernameFromResponse(createUser);
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-        
+        System.out.println(apiToken);
         return apiToken;
     }
     
@@ -121,7 +134,7 @@ public void test_001_AddFileGood() {
                  * @todo We have a need to show human readable success messages
                  * via API in a consistent location.
                  */
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.files[0].categories[0]", equalTo("Data"))
                 .body("data.files[0].dataFile.contentType", equalTo("image/png"))
                 .body("data.files[0].dataFile.description", equalTo("my description"))
@@ -192,7 +205,7 @@ public void test_003_AddFileNonExistentDatasetId() {
         String errMsg = BundleUtil.getStringFromBundle("find.dataset.error.dataset.not.found.id", Collections.singletonList(datasetId));
                 
          addResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+                .body("status", equalTo(ApiConstants.STATUS_ERROR))
                 .body("message", equalTo(errMsg))
                 .statusCode(NOT_FOUND.getStatusCode());
     }
@@ -213,12 +226,11 @@ public void test_004_AddFileBadToken() {
 
         msgt("Here it is: " + addResponse.prettyPrint());
 
-        String errMsg = BundleUtil.getStringFromBundle("file.addreplace.error.auth");
-        
+
         addResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
-                .body("message", equalTo(errMsg))
-                .statusCode(FORBIDDEN.getStatusCode());
+                .body("status", equalTo(ApiConstants.STATUS_ERROR))
+                .body("message", equalTo(ApiKeyAuthMechanism.RESPONSE_MESSAGE_BAD_API_KEY))
+                .statusCode(UNAUTHORIZED.getStatusCode());
     }
 
     @Test
@@ -243,7 +255,7 @@ public void testAddFileBadJson() {
         
         addResponse.then().assertThat()
         .statusCode(BAD_REQUEST.getStatusCode())
-        .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+        .body("status", equalTo(ApiConstants.STATUS_ERROR))
         .body("message", equalTo(parseError));
     }
     
@@ -276,7 +288,7 @@ public void test_005_AddFileBadPermissions() {
       
         addResponse.then().assertThat()
                 .body("message", equalTo(errMsg))
-                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+                .body("status", equalTo(ApiConstants.STATUS_ERROR))
                 .statusCode(FORBIDDEN.getStatusCode());
     }
 
@@ -351,7 +363,7 @@ public void test_006_ReplaceFileGood() throws InterruptedException {
         replaceRespWrongCtype.prettyPrint();
         replaceRespWrongCtype.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode())
-                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+                .body("status", equalTo(ApiConstants.STATUS_ERROR))
                 .body("message", equalTo(errMsgCtype));
                 //.body("data.rootDataFileId", equalTo(origFileId))    
         
@@ -433,7 +445,7 @@ public void test_006_ReplaceFileGood() throws InterruptedException {
                  */
                 //                .body("message", equalTo(successMsg2))
                 .statusCode(OK.getStatusCode())
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.files[0].label", equalTo("005.txt"))
                 // yes, replacing a file blanks out the description (and categories)
                 .body("data.files[0].description", equalTo(""))
@@ -500,7 +512,7 @@ public void test_006_ReplaceFileGoodTabular() throws InterruptedException {
 
         // give file time to ingest
        // sleep(10000);
-       assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+       assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Response ddi = UtilIT.getFileMetadata(origFileId.toString(), "ddi", apiToken);
 //        ddi.prettyPrint();
@@ -772,7 +784,7 @@ public void test_007_ReplaceFileUnpublishedAndBadIds() {
         replaceResp2.then().assertThat()
                 // TODO: Some day, change this from BAD_REQUEST to NOT_FOUND and expect the standard error message.
                .statusCode(BAD_REQUEST.getStatusCode())
-               .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+               .body("status", equalTo(ApiConstants.STATUS_ERROR))
                .body("message", Matchers.equalTo(BundleUtil.getStringFromBundle("file.addreplace.error.existing_file_to_replace_not_found_by_id", Arrays.asList(fakeFileId + ""))))
                ;
 
@@ -858,7 +870,7 @@ public void test_008_ReplaceFileAlreadyDeleted() {
         
         replaceResp.then().assertThat()
                .statusCode(BAD_REQUEST.getStatusCode())
-               .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+               .body("status", equalTo(ApiConstants.STATUS_ERROR))
                .body("message", Matchers.startsWith(errMsgDeleted))
                ;       
         
@@ -917,7 +929,7 @@ public void testReplaceFileBadJson() {
         String parseError = BundleUtil.getStringFromBundle("file.addreplace.error.parsing");
         replaceResp.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode())
-                .body("status", equalTo(AbstractApiBean.STATUS_ERROR))
+                .body("status", equalTo(ApiConstants.STATUS_ERROR))
                 .body("message", equalTo(parseError));
 
     }
@@ -1093,6 +1105,9 @@ public void testAccessFacet() {
         msg("Add initial file");
         String pathToFile = "src/main/webapp/resources/images/dataverseproject.png";
         Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+        
+        // Wait a little while for the index to pick up the file; otherwise there is a timing issue when searching for it.
+        UtilIT.sleepForReindex(datasetId.toString(), apiToken, 4);
 
         String successMsgAdd = BundleUtil.getStringFromBundle("file.addreplace.success.add");
 
@@ -1103,9 +1118,9 @@ public void testAccessFacet() {
 
         long fileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
 
-        Response searchShouldFindNothingBecauseUnpublished = UtilIT.search("id:datafile_" + fileId + "_draft", apiToken);
-        searchShouldFindNothingBecauseUnpublished.prettyPrint();
-        searchShouldFindNothingBecauseUnpublished.then().assertThat()
+        Response searchShouldFindBecauseAuthorApiTokenSupplied = UtilIT.search("id:datafile_" + fileId + "_draft", apiToken);
+        searchShouldFindBecauseAuthorApiTokenSupplied.prettyPrint();
+        searchShouldFindBecauseAuthorApiTokenSupplied.then().assertThat()
                 .body("data.total_count", equalTo(1))
                 .statusCode(OK.getStatusCode());
 
@@ -1201,7 +1216,7 @@ public void testUningestFileViaApi() throws InterruptedException {
         assertNotNull(origFileId);    // If checkOut fails, display message
        // sleep(10000);
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
         Response uningestFileResponse = UtilIT.uningestFile(origFileId, apiToken);
         assertEquals(200, uningestFileResponse.getStatusCode());       
     }
@@ -1235,7 +1250,7 @@ public void testFileMetaDataGetUpdateRoundTrip() throws InterruptedException {
         Long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
         
         //sleep(2000); //ensure tsv is consumed
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
         msg("Publish dataverse and dataset");
         Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
         publishDataversetResp.then().assertThat()
@@ -1333,7 +1348,7 @@ public void testDataSizeInDataverse() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         
         // wait for it to ingest... 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 5));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, 5), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
      //   sleep(10000);
      
         Response publishDataversetResp = UtilIT.publishDataverseViaSword(dataverseAlias, apiToken);
@@ -1341,7 +1356,7 @@ public void testDataSizeInDataverse() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         String apiTokenRando = createUserGetToken();
         
-        Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), ":draft", apiTokenRando);
+        Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), DS_VERSION_DRAFT, apiTokenRando);
         datasetStorageSizeResponseDraft.prettyPrint();
         assertEquals(UNAUTHORIZED.getStatusCode(), datasetStorageSizeResponseDraft.getStatusCode());  
         Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
@@ -1397,8 +1412,8 @@ public void testGetFileInfo() {
         createUser = UtilIT.createRandomUser();
         String apiTokenRegular = UtilIT.getApiTokenFromResponse(createUser);
 
-        msg("Add tabular file");
-        String pathToFile = "scripts/search/data/tabular/stata13-auto-withstrls.dta";
+        msg("Add a non-tabular file");
+        String pathToFile = "scripts/search/data/binary/trees.png";
         Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
 
         String dataFileId = addResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
@@ -1410,10 +1425,12 @@ public void testGetFileInfo() {
 
         getFileDataResponse.prettyPrint();
         getFileDataResponse.then().assertThat()
-                .body("data.label", equalTo("stata13-auto-withstrls.dta"))
-                .body("data.dataFile.filename", equalTo("stata13-auto-withstrls.dta"))
+                .body("data.label", equalTo("trees.png"))
+                .body("data.dataFile.filename", equalTo("trees.png"))
+                .body("data.dataFile.contentType", equalTo("image/png"))
+                .body("data.dataFile.filesize", equalTo(8361))
                 .statusCode(OK.getStatusCode());
-
+        
         getFileDataResponse = UtilIT.getFileData(dataFileId, apiTokenRegular);
         getFileDataResponse.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode());
@@ -1475,7 +1492,7 @@ public void testValidateDDI_issue6027() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
 
         // give file time to ingest
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
        // sleep(10000);
 
         Response ddi = UtilIT.getFileMetadata(origFileId.toString(), "ddi", apiToken);
@@ -1534,7 +1551,7 @@ public void test_ProcessShapeFilePackage() {
         // zip archive etc. etc. - but this should be a good start. 
         // -- L.A. 2020/09
         addResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.files[0].dataFile.contentType", equalTo(extractedShapeType))
                 .body("data.files[0].label", equalTo(extractedShapeName))
                 .body("data.files[0].directoryLabel", equalTo(extractedFolderName))
@@ -1573,7 +1590,7 @@ public void test_CrawlableAccessToDatasetFiles() {
         msgt("Server response: " + addResponse.prettyPrint());
       
         addResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.files[0].label", equalTo(testFileName))
                 .body("data.files[0].directoryLabel", equalTo(folderName))
                 .body("data.files[0].description", equalTo(description))
@@ -1592,7 +1609,7 @@ public void test_CrawlableAccessToDatasetFiles() {
         // Expected values in the output: 
         String expectedTitleTopFolder = "Index of folder /";
         String expectedLinkTopFolder = folderName + "/";
-        String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=:draft&folder=subfolder";
+        String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=" + DS_VERSION_DRAFT + "&folder=subfolder";
         
         String expectedTitleSubFolder = "Index of folder /" + folderName;
         String expectedLinkAhrefSubFolder = "/api/access/datafile/" + folderName + "/" + dataFileId;
@@ -1730,7 +1747,7 @@ public void testRange() throws IOException {
 
         Integer fileIdCsv = JsonPath.from(uploadFileCsv.body().asString()).getInt("data.files[0].dataFile.id");
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToCsv, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", authorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToCsv);
 
         // Just the tabular file, not the original, no byte range. Vanilla.
         Response downloadFileNoArgs = UtilIT.downloadFile(fileIdCsv, null, null, null, authorApiToken);
@@ -1863,7 +1880,7 @@ public void testAddFileToDatasetSkipTabIngest() throws IOException, InterruptedE
         logger.info(r.prettyPrint());
         assertEquals(200, r.getStatusCode());
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Long dataFileId = JsonPath.from(r.body().asString()).getLong("data.files[0].dataFile.id");
         Response fileMeta = UtilIT.getDataFileMetadataDraft(dataFileId, apiToken);
@@ -1876,7 +1893,7 @@ public void testAddFileToDatasetSkipTabIngest() throws IOException, InterruptedE
         logger.info(rTabIngest.prettyPrint());
         assertEquals(200, rTabIngest.getStatusCode());
 
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFile, UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdInt, "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFile);
 
         Long ingDataFileId = JsonPath.from(rTabIngest.body().asString()).getLong("data.files[0].dataFile.id");
         Response ingFileMeta = UtilIT.getDataFileMetadataDraft(ingDataFileId, apiToken);
@@ -1895,4 +1912,575 @@ public void testAddFileToDatasetSkipTabIngest() throws IOException, InterruptedE
 
     }
 
+    @Test
+    public void testDeleteFile() {
+        msgt("testDeleteFile");
+        // Create user
+        String apiToken = createUserGetToken();
+
+        // Create user with no permission
+        String apiTokenNoPerms = createUserGetToken();
+
+        // Create Dataverse
+        String dataverseAlias = createDataverseGetAlias(apiToken);
+
+        // Create Dataset
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId");
+
+        // Upload file 1
+        String pathToFile1 = "src/main/webapp/resources/images/dataverseproject.png";
+        JsonObjectBuilder json1 = Json.createObjectBuilder()
+                .add("description", "my description1")
+                .add("directoryLabel", "data/subdir1")
+                .add("categories", Json.createArrayBuilder().add("Data"));
+        Response uploadResponse1 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile1, json1.build(), apiToken);
+        uploadResponse1.then().assertThat().statusCode(OK.getStatusCode());
+
+        Integer fileId1 = JsonPath.from(uploadResponse1.body().asString()).getInt("data.files[0].dataFile.id");
+
+        // Check file uploaded
+        Response downloadResponse1 = UtilIT.downloadFile(fileId1, null, null, null, apiToken);
+        downloadResponse1.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Delete file 1
+        Response deleteResponseFail = UtilIT.deleteFileApi(fileId1, apiTokenNoPerms);
+        deleteResponseFail.prettyPrint();
+        deleteResponseFail.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+
+        Response deleteResponse1 = UtilIT.deleteFileApi(fileId1, apiToken);
+        deleteResponse1.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Check file 1 deleted for good because it was in a draft
+        Response downloadResponse1notFound = UtilIT.downloadFile(fileId1, null, null, null, apiToken);
+        downloadResponse1notFound.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+        // Upload file 2
+        String pathToFile2 = "src/main/webapp/resources/images/cc0.png";
+        JsonObjectBuilder json2 = Json.createObjectBuilder()
+                .add("description", "my description2")
+                .add("directoryLabel", "data/subdir1")
+                .add("categories", Json.createArrayBuilder().add("Data"));
+        Response uploadResponse2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, json2.build(), apiToken);
+        uploadResponse2.then().assertThat().statusCode(OK.getStatusCode());
+
+        Integer fileId2 = JsonPath.from(uploadResponse2.body().asString()).getInt("data.files[0].dataFile.id");
+
+        // Upload file 3
+        String pathToFile3 = "src/main/webapp/resources/images/orcid_16x16.png";
+        JsonObjectBuilder json3 = Json.createObjectBuilder()
+                .add("description", "my description3")
+                .add("directoryLabel", "data/subdir1")
+                .add("categories", Json.createArrayBuilder().add("Data"));
+        Response uploadResponse3 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile3, json3.build(), apiToken);
+        uploadResponse3.then().assertThat().statusCode(OK.getStatusCode());
+
+        Integer fileId3 = JsonPath.from(uploadResponse3.body().asString()).getInt("data.files[0].dataFile.id");
+
+        // Publish collection and dataset
+        UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+        UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+        Response deleteResponse2 = UtilIT.deleteFileApi(fileId2, apiToken);
+        deleteResponse2.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Check file 2 deleted from post v1.0 draft
+        Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
+        postv1draft.prettyPrint();
+        postv1draft.then().assertThat()
+                .body("data.files.size()", equalTo(1))
+                .statusCode(OK.getStatusCode());
+
+        // Check file 2 still in v1.0
+        Response v1 = UtilIT.getDatasetVersion(datasetPid, "1.0", apiToken);
+        v1.prettyPrint();
+        v1.then().assertThat()
+                .body("data.files[0].dataFile.filename", equalTo("cc0.png"))
+                .statusCode(OK.getStatusCode());
+        
+        Map<String, Object> v1files1 = with(v1.body().asString()).param("fileToFind", "cc0.png")
+                .getJsonObject("data.files.find { files -> files.label == fileToFind }");
+        assertEquals("cc0.png", v1files1.get("label"));
+
+        // Check file 2 is still downloadable (published in v1.0)
+        Response downloadResponse2 = UtilIT.downloadFile(fileId2, null, null, null, apiToken);
+        downloadResponse2.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Check file 3 still in post v1.0 draft
+        Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
+        postv1draft2.prettyPrint();
+        postv1draft2.then().assertThat()
+                .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png"))
+                .statusCode(OK.getStatusCode());
+        
+        Map<String, Object> v1files2 = with(postv1draft2.body().asString()).param("fileToFind", "orcid_16x16.png")
+                .getJsonObject("data.files.find { files -> files.label == fileToFind }");
+        assertEquals("orcid_16x16.png", v1files2.get("label"));
+
+        // Delete file 3, the current version is still draft
+        Response deleteResponse3 = UtilIT.deleteFileApi(fileId3, apiToken);
+        deleteResponse3.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Check file 3 deleted from post v1.0 draft
+        Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken);
+        postv1draft3.prettyPrint();
+        postv1draft3.then().assertThat()
+                .body("data.files[0]", equalTo(null))
+                .statusCode(OK.getStatusCode());
+    }
+    
+    // The following specifically tests file-level PIDs configuration in 
+    // individual collections (#8889/#9614)
+    @Test
+    public void testFilePIDsBehavior() {
+        // Create user
+        Response createUser = UtilIT.createRandomUser();
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        Response toggleSuperuser = UtilIT.makeSuperUser(username);
+        toggleSuperuser.then().assertThat()
+                .statusCode(OK.getStatusCode());
+        try {
+            UtilIT.enableSetting(SettingsServiceBean.Key.FilePIDsEnabled);
+
+            // Create Dataverse
+            String collectionAlias = createDataverseGetAlias(apiToken);
+
+            // Create Initial Dataset with 1 file:
+            Integer datasetId = createDatasetGetId(collectionAlias, apiToken);
+            String pathToFile = "scripts/search/data/replace_test/003.txt";
+            Response addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+
+            addResponse.then().assertThat().body("data.files[0].dataFile.contentType", equalTo("text/plain"))
+                    .body("data.files[0].label", equalTo("003.txt")).statusCode(OK.getStatusCode());
+
+            Long origFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
+
+            // -------------------------
+            // Publish dataverse and dataset
+            // -------------------------
+            msg("Publish dataverse and dataset");
+            Response publishCollectionResp = UtilIT.publishDataverseViaSword(collectionAlias, apiToken);
+            publishCollectionResp.then().assertThat().statusCode(OK.getStatusCode());
+
+            Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+            publishDatasetResp.then().assertThat().statusCode(OK.getStatusCode());
+
+            // The file in this dataset should have been assigned a PID when it was
+            // published:
+            Response fileInfoResponse = UtilIT.getFileData(origFileId.toString(), apiToken);
+            fileInfoResponse.then().assertThat().statusCode(OK.getStatusCode());
+            String fileInfoResponseString = fileInfoResponse.body().asString();
+            msg(fileInfoResponseString);
+
+            String origFilePersistentId = JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId");
+            assertNotNull(
+                    origFilePersistentId,
+                    "The file did not get a persistent identifier assigned (check that file PIDs are enabled instance-wide!)");
+
+            // Now change the file PIDs registration configuration for the collection:
+            UtilIT.enableSetting(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection);
+            Response changeAttributeResp = UtilIT.setCollectionAttribute(collectionAlias, "filePIDsEnabled", "false",
+                    apiToken);
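+            // Sanity check (assuming the attribute endpoint returns OK on success):
+            changeAttributeResp.then().assertThat().statusCode(OK.getStatusCode());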
+
+            // ... And do the whole thing again: create another dataset with a file:
+
+            datasetId = createDatasetGetId(collectionAlias, apiToken);
+            addResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+            addResponse.then().assertThat().statusCode(OK.getStatusCode());
+            Long newFileId = JsonPath.from(addResponse.body().asString()).getLong("data.files[0].dataFile.id");
+
+            // And publish this dataset:
+            msg("Publish second dataset");
+
+            publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+            publishDatasetResp.then().assertThat().statusCode(OK.getStatusCode());
+
+            // And confirm that the file didn't get a PID:
+
+            fileInfoResponse = UtilIT.getFileData(newFileId.toString(), apiToken);
+            fileInfoResponse.then().assertThat().statusCode(OK.getStatusCode());
+            fileInfoResponseString = fileInfoResponse.body().asString();
+            msg(fileInfoResponseString);
+
+            assertEquals("", JsonPath.from(fileInfoResponseString).getString("data.dataFile.persistentId"),
+                "The file was NOT supposed to be issued a PID");
+        } finally {
+            UtilIT.deleteSetting(SettingsServiceBean.Key.FilePIDsEnabled);
+            UtilIT.deleteSetting(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection);
+        }
+    }
+
+    @Test
+    public void testGetFileDownloadCount() throws InterruptedException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Upload test file
+        String pathToTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Publish collection and dataset
+        UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode());
+        UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode());
+
+        // Download test file
+        int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id");
+
+        Response downloadResponse = UtilIT.downloadFile(testFileId, apiToken);
+        downloadResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Ensure download count is updated
+        sleep(2000);
+
+        // Get download count and assert it is 1
+        Response getFileDownloadCountResponse = UtilIT.getFileDownloadCount(Integer.toString(testFileId), apiToken);
+        getFileDownloadCountResponse.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.message", equalTo("1"));
+
+        // Call with invalid file id
+        Response getFileDownloadCountInvalidIdResponse = UtilIT.getFileDownloadCount("testInvalidId", apiToken);
+        getFileDownloadCountInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+    }
+
+    @Test
+    public void testGetFileDataTables() throws InterruptedException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Upload non-tabular file
+        String pathToNonTabularTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadNonTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToNonTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadNonTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Assert that getting data tables for non-tabular file fails
+        int testNonTabularFileId = JsonPath.from(uploadNonTabularFileResponse.body().asString()).getInt("data.files[0].dataFile.id");
+        Response getFileDataTablesForNonTabularFileResponse = UtilIT.getFileDataTables(Integer.toString(testNonTabularFileId), apiToken);
+        getFileDataTablesForNonTabularFileResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+
+        // Upload tabular file
+        String pathToTabularTestFile = "src/test/resources/tab/test.tab";
+        Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Ensure tabular file is ingested
+        sleep(2000);
+
+        String testTabularFileId = Integer.toString(JsonPath.from(uploadTabularFileResponse.body().asString()).getInt("data.files[0].dataFile.id"));
+
+        // Get file data tables for the tabular file and assert data is obtained
+        Response getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, apiToken);
+        getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+        int dataTablesNumber = JsonPath.from(getFileDataTablesForTabularFileResponse.body().asString()).getList("data").size();
+        assertTrue(dataTablesNumber > 0);
+
+        // Get file data tables for a restricted tabular file as the owner and assert data is obtained
+        Response restrictFileResponse = UtilIT.restrictFile(testTabularFileId, true, apiToken);
+        restrictFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+        getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, apiToken);
+        getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Get file data tables for a restricted tabular file as other user and assert forbidden error is thrown
+        Response createRandomUser = UtilIT.createRandomUser();
+        createRandomUser.then().assertThat().statusCode(OK.getStatusCode());
+        String randomUserApiToken = UtilIT.getApiTokenFromResponse(createRandomUser);
+        getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken);
+        getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
+    }
+
+    @Test
+    public void testSetFileCategories() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Upload test file
+        String pathToTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+
+        // Set categories
+        String testCategory1 = "testCategory1";
+        String testCategory2 = "testCategory2";
+        List<String> testCategories = List.of(testCategory1, testCategory2);
+        Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, testCategories);
+        setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Get file data and check for new categories
+        Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken);
+        getFileDataResponse.prettyPrint();
+        getFileDataResponse.then().assertThat()
+                .body("data.categories", hasItem(testCategory1))
+                .body("data.categories", hasItem(testCategory2))
+                .statusCode(OK.getStatusCode());
+    }
+
+    @Test
+    public void testSetFileTabularTags() throws InterruptedException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Upload tabular file
+        String pathToTabularTestFile = "src/test/resources/tab/test.tab";
+        Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+
+        // Ensure tabular file is ingested
+        sleep(2000);
+
+        // Set tabular tags
+        String testTabularTag1 = "Survey";
+        String testTabularTag2 = "Genomics";
+        // We repeat one to test that it is not duplicated
+        String testTabularTag3 = "Genomics";
+        List<String> testTabularTags = List.of(testTabularTag1, testTabularTag2, testTabularTag3);
+        Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags);
+        setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Get file data and check for new tabular tags
+        Response getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken);
+        getFileDataResponse.then().assertThat()
+                .body("data.dataFile.tabularTags", hasItem(testTabularTag1))
+                .body("data.dataFile.tabularTags", hasItem(testTabularTag2))
+                .statusCode(OK.getStatusCode());
+
+        int actualTabularTagsCount = getFileDataResponse.jsonPath().getList("data.dataFile.tabularTags").size();
+        assertEquals(2, actualTabularTagsCount);
+
+        // Set invalid tabular tag
+        String testInvalidTabularTag = "Invalid";
+        setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(testInvalidTabularTag));
+        setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+
+        // Get file data and check tabular tags are unaltered
+        getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken);
+        getFileDataResponse.then().assertThat()
+                .body("data.dataFile.tabularTags", hasItem(testTabularTag1))
+                .body("data.dataFile.tabularTags", hasItem(testTabularTag2))
+                .statusCode(OK.getStatusCode());
+
+        actualTabularTagsCount = getFileDataResponse.jsonPath().getList("data.dataFile.tabularTags").size();
+        assertEquals(2, actualTabularTagsCount);
+
+        // Should receive an error when calling the endpoint for a non-tabular file
+        String pathToTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String nonTabularFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+
+        setFileTabularTagsResponse = UtilIT.setFileTabularTags(nonTabularFileId, apiToken, List.of(testInvalidTabularTag));
+        setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+    }
+
+    @Test
+    public void testGetHasBeenDeleted() {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+
+        // Upload test file
+        String pathToTestFile = "src/test/resources/images/coffeeshop.png";
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id");
+
+        // Publish dataverse and dataset
+        Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken);
+        publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Assert that the file has not been deleted
+        Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken);
+        getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode());
+        boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data");
+        assertFalse(fileHasBeenDeleted);
+
+        // Delete test file
+        Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken);
+        deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Assert that the file has been deleted
+        getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken);
+        getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode());
+        fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data");
+        assertTrue(fileHasBeenDeleted);
+    }
+    
+    @Test
+    public void testCollectionStorageQuotas() {
+        // A minimal storage quota functionality test: 
+        // - We create a collection and define a storage quota
+        // - We configure Dataverse to enforce it 
+        // - We confirm that we can upload a file with the size under the quota
+        // - We confirm that we cannot upload a file once the quota is reached
+        // - We disable the quota on the collection via the API
+        
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        Response makeSuperUser = UtilIT.makeSuperUser(username);
+        assertEquals(200, makeSuperUser.getStatusCode());
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+        
+        System.out.println("dataset id: "+datasetId);
+        
+        Response checkQuotaResponse = UtilIT.checkCollectionQuota(dataverseAlias, apiToken);
+        checkQuotaResponse.then().assertThat().statusCode(OK.getStatusCode());
+        // This brand new collection shouldn't have any quota defined yet: 
+        assertEquals(BundleUtil.getStringFromBundle("dataverse.storage.quota.notdefined"), JsonPath.from(checkQuotaResponse.body().asString()).getString("data.message"));
+        
+        // Set quota to 1K:
+        Response setQuotaResponse = UtilIT.setCollectionQuota(dataverseAlias, 1024, apiToken);
+        setQuotaResponse.then().assertThat().statusCode(OK.getStatusCode());
+        assertEquals(BundleUtil.getStringFromBundle("dataverse.storage.quota.updated"), JsonPath.from(setQuotaResponse.body().asString()).getString("data.message"));
+        
+        // Check again:
+        checkQuotaResponse = UtilIT.checkCollectionQuota(dataverseAlias, apiToken);
+        checkQuotaResponse.then().assertThat().statusCode(OK.getStatusCode());
+        String expectedApiMessage = BundleUtil.getStringFromBundle("dataverse.storage.quota.allocation", Arrays.asList("1,024"));
+        assertEquals(expectedApiMessage, JsonPath.from(checkQuotaResponse.body().asString()).getString("data.message"));
+
+        System.out.println(expectedApiMessage);
+        
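+        // The quota set above is only enforced while this instance-wide setting is enabled: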
+        UtilIT.enableSetting(SettingsServiceBean.Key.UseStorageQuotas);
+                
+        String pathToFile306bytes = "src/test/resources/FileRecordJobIT.properties"; 
+        String pathToFile1787bytes = "src/test/resources/datacite.xml";
+
+        // Upload a small file: 
+        
+        Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile306bytes, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+        
+        // Check the recorded storage use: 
+        
+        Response checkStorageUseResponse = UtilIT.checkCollectionStorageUse(dataverseAlias, apiToken);
+        checkStorageUseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        expectedApiMessage = BundleUtil.getStringFromBundle("dataverse.storage.use", Arrays.asList("306"));
+        assertEquals(expectedApiMessage, JsonPath.from(checkStorageUseResponse.body().asString()).getString("data.message"));
+
+        System.out.println(expectedApiMessage);
+        
+        // Attempt to upload the second file - this should get us over the quota, 
+        // so it should be rejected:
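+        // (1,024-byte quota - 306 bytes already used = 718 bytes left; the 1,787-byte file won't fit)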
+        
+        uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile1787bytes, Json.createObjectBuilder().build(), apiToken);
+        uploadResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode());
+        // We should get an error message composed of two Bundle strings:
+        expectedApiMessage = BundleUtil.getStringFromBundle("file.addreplace.error.ingest_create_file_err");
+        expectedApiMessage = expectedApiMessage + " " + BundleUtil.getStringFromBundle("file.addreplace.error.quota_exceeded", Arrays.asList("1.7 KB", "718 B"));
+        assertEquals(expectedApiMessage, JsonPath.from(uploadResponse.body().asString()).getString("message"));
+        
+        System.out.println(expectedApiMessage);
+        
+        // Check Storage Use again - should be unchanged: 
+        
+        checkStorageUseResponse = UtilIT.checkCollectionStorageUse(dataverseAlias, apiToken);
+        checkStorageUseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        expectedApiMessage = BundleUtil.getStringFromBundle("dataverse.storage.use", Arrays.asList("306"));
+        assertEquals(expectedApiMessage, JsonPath.from(checkStorageUseResponse.body().asString()).getString("data.message"));
+
+        // Disable the quota on the collection; try again:
+        
+        Response disableQuotaResponse = UtilIT.disableCollectionQuota(dataverseAlias, apiToken);
+        disableQuotaResponse.then().assertThat().statusCode(OK.getStatusCode());
+        expectedApiMessage = BundleUtil.getStringFromBundle("dataverse.storage.quota.deleted");
+        assertEquals(expectedApiMessage, JsonPath.from(disableQuotaResponse.body().asString()).getString("data.message"));
+
+        // Check again: 
+        
+        checkQuotaResponse = UtilIT.checkCollectionQuota(dataverseAlias, apiToken);
+        checkQuotaResponse.then().assertThat().statusCode(OK.getStatusCode());
+        // ... should say "no quota", again: 
+        assertEquals(BundleUtil.getStringFromBundle("dataverse.storage.quota.notdefined"), JsonPath.from(checkQuotaResponse.body().asString()).getString("data.message"));
+        
+        // And try to upload the larger file again:
+        
+        uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToFile1787bytes, Json.createObjectBuilder().build(), apiToken);
+        // ... should work this time around:
+        uploadResponse.then().assertThat().statusCode(OK.getStatusCode());
+            
+        // Let's confirm that the total storage use has been properly updated:
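+        // (306 + 1,787 = 2,093 bytes)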
+
+        //try {sleep(1000);}catch(InterruptedException ie){}
+        
+        checkStorageUseResponse = UtilIT.checkCollectionStorageUse(dataverseAlias, apiToken);
+        checkStorageUseResponse.then().assertThat().statusCode(OK.getStatusCode());
+        expectedApiMessage = BundleUtil.getStringFromBundle("dataverse.storage.use", Arrays.asList("2,093"));
+        assertEquals(expectedApiMessage, JsonPath.from(checkStorageUseResponse.body().asString()).getString("data.message"));
+
+        System.out.println(expectedApiMessage);
+        
+        // @todo: a test for the storage use hierarchy? - create a couple of 
+        // sub-collections, upload a file into a dataset in the farthest branch 
+        // collection, make sure the usage has been incremented all the way up 
+        // to the root? 
+        
+        UtilIT.deleteSetting(SettingsServiceBean.Key.UseStorageQuotas);
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java
new file mode 100644
index 00000000000..e788efc9c87
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/FitsIT.java
@@ -0,0 +1,79 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.response.Response;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+public class FitsIT {
+
+    @BeforeAll
+    public static void setUp() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testAstroFieldsFromFits() throws IOException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String username = UtilIT.getUsernameFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        createDataverseResponse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response setMetadataBlocks = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken);
+        setMetadataBlocks.prettyPrint();
+        setMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = UtilIT.getDatasetPersistentIdFromResponse(createDataset);
+
+        // "FOS 2 x 2064 primary array spectrum containing the flux and wavelength arrays, plus a small table extension"
+        // from https://fits.gsfc.nasa.gov/fits_samples.html
+        String pathToFile = "src/test/resources/fits/FOSy19g0309t_c2f.fits";
+
+        Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+        uploadFile.prettyPrint();
+        uploadFile.then().assertThat().statusCode(OK.getStatusCode());
+
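+        // The FITS ingest extracts recognized header keywords (NAXIS, INSTRUME, DATE-OBS, etc.)
+        // into the file description and the astrophysics metadata block; the assertions below
+        // verify that they show up in the native JSON output.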
+        Response getJson = UtilIT.nativeGet(datasetId, apiToken);
+        getJson.prettyPrint();
+        getJson.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.latestVersion.files[0].description", equalTo("FITS file, 2 HDUs total:\nThe primary HDU; 1 Table HDU(s) 1 Image HDU(s); \nThe following recognized metadata keys have been found in the FITS file:\nCRVAL2; NAXIS; INSTRUME; NAXIS1; NAXIS0; EXPTIME; CD1_1; CRVAL1; TARGNAME; DATE-OBS; \n"))
+                .body("data.latestVersion.metadataBlocks.astrophysics.fields[0].value[0]", equalTo("Image"));
+
+        // a bit more precise than the check for "Image" above (but annoyingly fiddly)
+        List<JsonObject> astroTypeFromNativeGet = with(getJson.body().asString()).param("astroType", "astroType")
+                .getJsonObject("data.latestVersion.metadataBlocks.astrophysics.fields.findAll { fields -> fields.typeName == astroType }");
+        Map firstAstroTypeFromNativeGet = astroTypeFromNativeGet.get(0);
+        assertTrue(firstAstroTypeFromNativeGet.toString().contains("Image"));
+
+        List<JsonObject> coverageTemporalFromNativeGet = with(getJson.body().asString()).param("coverageTemporal", "coverage.Temporal")
+                .getJsonObject("data.latestVersion.metadataBlocks.astrophysics.fields.findAll { fields -> fields.typeName == coverageTemporal }");
+        Map firstCoverageTemporalFromNativeGet = coverageTemporalFromNativeGet.get(0);
+        assertTrue(firstCoverageTemporalFromNativeGet.toString().contains("1993"));
+
+    }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
index 094eb0df77c..d5388e510d2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
@@ -1,21 +1,23 @@
 package edu.harvard.iq.dataverse.api;
 
 import java.util.logging.Logger;
-import java.util.logging.Level;
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import org.junit.Test;
-import com.jayway.restassured.response.Response;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.ACCEPTED;
-import static javax.ws.rs.core.Response.Status.OK;
+
+import org.junit.jupiter.api.Test;
+
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.ACCEPTED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
 
 /**
  * This class tests Harvesting Client functionality. 
@@ -41,7 +43,7 @@ public class HarvestingClientsIT {
     private static String adminUserAPIKey;
     private static String harvestCollectionAlias; 
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         
@@ -75,11 +77,11 @@ private static void setupCollection() {
     }
 
     @Test
-    public void testCreateEditDeleteClient() {
+    public void testCreateEditDeleteClient() throws InterruptedException {
         // This method focuses on testing the native Dataverse harvesting client
         // API. 
         
-        String nickName = UtilIT.getRandomString(6);
+        String nickName = "h" + UtilIT.getRandomString(6);
         
 
         String clientApiPath = String.format(HARVEST_CLIENTS_API+"%s", nickName);
@@ -130,7 +132,7 @@ public void testCreateEditDeleteClient() {
         // ... and validate the values:
         
         getClientResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.type", equalTo("oai"))
                 .body("data.nickName", equalTo(nickName))
                 .body("data.archiveDescription", equalTo(ARCHIVE_DESCRIPTION))
@@ -166,7 +168,7 @@ public void testHarvestingClientRun()  throws InterruptedException {
         // method, we don't need to pay too much attention to this method, aside 
         // from confirming the expected HTTP status code.
         
-        String nickName = UtilIT.getRandomString(6);
+        String nickName = "h" + UtilIT.getRandomString(6);
 
         String clientApiPath = String.format(HARVEST_CLIENTS_API+"%s", nickName);
         String clientJson = String.format("{\"dataverseAlias\":\"%s\","
@@ -176,7 +178,7 @@ public void testHarvestingClientRun()  throws InterruptedException {
                 + "\"set\":\"%s\","
                 + "\"metadataFormat\":\"%s\"}", 
                 harvestCollectionAlias, HARVEST_URL, ARCHIVE_URL, CONTROL_OAI_SET, HARVEST_METADATA_FORMAT);
-                
+        
         Response createResponse = given()
                 .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey)
                 .body(clientJson)
@@ -214,8 +216,8 @@ public void testHarvestingClientRun()  throws InterruptedException {
         
             assertEquals(OK.getStatusCode(), getClientResponse.getStatusCode());
             JsonPath responseJsonPath = getClientResponse.body().jsonPath();
-            assertNotNull("Invalid JSON in GET client response", responseJsonPath);
-            assertEquals(AbstractApiBean.STATUS_OK, responseJsonPath.getString("status")); 
+            assertNotNull(responseJsonPath, "Invalid JSON in GET client response");
+            assertEquals(ApiConstants.STATUS_OK, responseJsonPath.getString("status"));
             
             String clientStatus = responseJsonPath.getString("data.status");
             assertNotNull(clientStatus);
@@ -228,10 +230,10 @@ public void testHarvestingClientRun()  throws InterruptedException {
                         + getClientResponse.prettyPrint());
                 // Check the values in the response:
                 // a) Confirm that the harvest has completed: 
-                assertEquals("Unexpected client status: "+clientStatus, "inActive", clientStatus);
+                assertEquals("inActive", clientStatus, "Unexpected client status: "+clientStatus);
                 
                 // b) Confirm that it has actually succeeded:
-                assertEquals("Last harvest not reported a success (took "+i+" seconds)", "SUCCESS", responseJsonPath.getString("data.lastResult"));
+                assertEquals("SUCCESS", responseJsonPath.getString("data.lastResult"), "Last harvest not reported a success (took "+i+" seconds)");
                 String harvestTimeStamp = responseJsonPath.getString("data.lastHarvest");
                 assertNotNull(harvestTimeStamp); 
                 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java
index 94a8e373848..e02964ef28f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingServerIT.java
@@ -2,26 +2,29 @@
 
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import org.junit.BeforeClass;
-import org.junit.AfterClass;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.path.xml.XmlPath;
-import com.jayway.restassured.path.xml.element.Node;
+import io.restassured.response.Response;
+import io.restassured.path.xml.XmlPath;
+import io.restassured.path.xml.element.Node;
+
 import java.util.ArrayList;
 import java.util.Collections;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import java.util.List;
 import java.util.Set;
 import java.util.HashSet;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertEquals;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  * Tests for the Harvesting Server functionality
@@ -38,7 +41,7 @@ public class HarvestingServerIT {
     private static String singleSetDatasetPersistentId;
     private static List<String> extraDatasetsIdentifiers = new ArrayList<>();
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
 	// enable harvesting server
@@ -53,7 +56,7 @@ public static void setUpClass() {
         
     }
 
-    @AfterClass
+    @AfterAll
     public static void afterClass() {
 	// disable harvesting server (default value)
 	Response enableHarvestingServerResponse = UtilIT.setSetting(SettingsServiceBean.Key.OAIServerEnabled,"false");
@@ -240,7 +243,7 @@ public void testNativeSetAPI() {
         assertEquals(200, getSetResponse.getStatusCode());
         
         getSetResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.definition", equalTo("*"))
                 .body("data.description", equalTo(""))
                 .body("data.name", equalTo(setName));
@@ -353,7 +356,7 @@ public void testSetEditAPIandOAIlistSets() {
         assertEquals(200, getSetResponse.getStatusCode());
         
         getSetResponse.then().assertThat()
-                .body("status", equalTo(AbstractApiBean.STATUS_OK))
+                .body("status", equalTo(ApiConstants.STATUS_OK))
                 .body("data.definition", equalTo(newDefinition))
                 .body("data.description", equalTo(newDescription))
                 .body("data.name", equalTo(setName));
@@ -373,10 +376,10 @@ public void testSetEditAPIandOAIlistSets() {
         List<Node> listSets = responseXmlPath.getList("OAI-PMH.ListSets.set.list().findAll{it.setName=='"+setName+"'}", Node.class);
         
         // 2a. Confirm that our set is listed:
-        assertNotNull("Unexpected response from ListSets", listSets);
-        assertTrue("Newly-created set isn't properly listed by the OAI server", listSets.size() == 1);
+        assertNotNull(listSets, "Unexpected response from ListSets");
+        assertEquals(1, listSets.size(), "Newly-created set isn't properly listed by the OAI server");
         // 2b. Confirm that the set entry contains the updated description: 
-        assertEquals("Incorrect description in the ListSets entry", newDescription, listSets.get(0).getPath("setDescription.metadata.element.field", String.class));
+        assertEquals(newDescription, listSets.get(0).getPath("setDescription.metadata.element.field", String.class), "Incorrect description in the ListSets entry");
         
         // ok, the xml record looks good! 
 
@@ -603,12 +606,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
 
         // Validate the payload of the ListIdentifiers response:
         // 1a) There should be 2 items listed:
-        assertEquals("Wrong number of items on the first ListIdentifiers page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the first ListIdentifiers page");
         
         // 1b) The response contains a resumptionToken for the next page of items:
         String resumptionToken = responseXmlPath.getString("OAI-PMH.ListIdentifiers.resumptionToken");
-        assertNotNull("No resumption token in the ListIdentifiers response (has the jvm option dataverse.oai.server.maxidentifiers been configured?)", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListIdentifiers response (has the jvm option dataverse.oai.server.maxidentifiers been configured?)");
         
         // 1c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListIdentifiers.resumptionToken.@completeListSize"));
@@ -650,12 +652,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListIdentifiers response:
         // 2a) There should still be 2 items listed:
-        assertEquals("Wrong number of items on the second ListIdentifiers page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the second ListIdentifiers page");
         
         // 2b) The response should contain a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListIdentifiers.resumptionToken");
-        assertNotNull("No resumption token in the ListIdentifiers response", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListIdentifiers response");
         
         // 2c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListIdentifiers.resumptionToken.@completeListSize"));
@@ -690,13 +691,12 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListIdentifiers response:
         // 3a) There should be only 1 item listed:
-        assertEquals("Wrong number of items on the final ListIdentifiers page", 
-                1, ret.size());
+        assertEquals(1, ret.size(), "Wrong number of items on the final ListIdentifiers page");
         
         // 3b) The response contains a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListIdentifiers.resumptionToken");
-        assertNotNull("No resumption token in the final ListIdentifiers response", resumptionToken);
-        assertTrue("Non-empty resumption token in the final ListIdentifiers response", "".equals(resumptionToken));
+        assertNotNull(resumptionToken, "No resumption token in the final ListIdentifiers response");
+        assertEquals("", resumptionToken, "Non-empty resumption token in the final ListIdentifiers response");
         
         // 3c) The total number of items in the set (5) is still listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListIdentifiers.resumptionToken.@completeListSize"));
@@ -717,8 +717,7 @@ public void testMultiRecordOaiSet() throws InterruptedException {
             allDatasetsListed = allDatasetsListed && persistentIdsInListIdentifiers.contains(persistentId); 
         }
         
-        assertTrue("Control datasets not properly listed in the paged ListIdentifiers response", 
-                allDatasetsListed);
+        assertTrue(allDatasetsListed, "Control datasets not properly listed in the paged ListIdentifiers response");
         
         // OK, it is safe to assume ListIdentifiers works as it should in page mode.
         
@@ -743,12 +742,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListRecords response:
         // 4a) There should be 2 items listed:
-        assertEquals("Wrong number of items on the first ListRecords page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the first ListRecords page");
         
         // 4b) The response contains a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListRecords.resumptionToken");
-        assertNotNull("No resumption token in the ListRecords response (has the jvm option dataverse.oai.server.maxrecords been configured?)", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListRecords response (has the jvm option dataverse.oai.server.maxrecords been configured?)");
         
         // 4c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListRecords.resumptionToken.@completeListSize"));
@@ -784,12 +782,11 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListRecords response:
         // 4a) There should still be 2 items listed:
-        assertEquals("Wrong number of items on the second ListRecords page",
-                2, ret.size());
+        assertEquals(2, ret.size(), "Wrong number of items on the second ListRecords page");
         
         // 4b) The response should contain a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListRecords.resumptionToken");
-        assertNotNull("No resumption token in the ListRecords response", resumptionToken);
+        assertNotNull(resumptionToken, "No resumption token in the ListRecords response");
         
         // 4c) The total number of items in the set (5) is listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListRecords.resumptionToken.@completeListSize"));
@@ -824,13 +821,12 @@ public void testMultiRecordOaiSet() throws InterruptedException {
         
         // Validate the payload of the ListRecords response:
         // 6a) There should be only 1 item listed:
-        assertEquals("Wrong number of items on the final ListRecords page", 
-                1, ret.size());
+        assertEquals(1, ret.size(), "Wrong number of items on the final ListRecords page");
         
         // 6b) The response contains a resumptionToken for the next page of items:
         resumptionToken = responseXmlPath.getString("OAI-PMH.ListRecords.resumptionToken");
-        assertNotNull("No resumption token in the final ListRecords response", resumptionToken);
-        assertTrue("Non-empty resumption token in the final ListRecords response", "".equals(resumptionToken));
+        assertNotNull(resumptionToken, "No resumption token in the final ListRecords response");
+        assertEquals("", resumptionToken, "Non-empty resumption token in the final ListRecords response");
         
         // 6c) The total number of items in the set (5) is still listed correctly:
         assertEquals(5, responseXmlPath.getInt("OAI-PMH.ListRecords.resumptionToken.@completeListSize"));
@@ -851,8 +847,7 @@ public void testMultiRecordOaiSet() throws InterruptedException {
             allDatasetsListed = allDatasetsListed && persistentIdsInListRecords.contains(persistentId); 
         }
         
-        assertTrue("Control datasets not properly listed in the paged ListRecords response", 
-                allDatasetsListed);
+        assertTrue(allDatasetsListed, "Control datasets not properly listed in the paged ListRecords response");
         
         // OK, it is safe to assume ListRecords works as it should in page mode
         // as well. 
@@ -863,7 +858,7 @@ public void testMultiRecordOaiSet() throws InterruptedException {
                 .header(UtilIT.API_TOKEN_HTTP_HEADER, adminUserAPIKey)
                 .delete(setPath);
         logger.info("deleteResponse.getStatusCode(): " + deleteResponse.getStatusCode());
-        assertEquals("Failed to delete the control multi-record set", 200, deleteResponse.getStatusCode());
+        assertEquals(200, deleteResponse.getStatusCode(), "Failed to delete the control multi-record set");
     }
     
     // TODO: 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java
index 89aeaa6b2ee..307eef48773 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InReviewWorkflowIT.java
@@ -1,31 +1,29 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.path.xml.XmlPath;
-import com.jayway.restassured.response.Response;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.path.xml.XmlPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class InReviewWorkflowIT {
 
     private static final Logger logger = Logger.getLogger(DatasetsIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
 
@@ -170,7 +168,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         String citation = XmlPath.from(atomEntry.body().asString()).getString("bibliographicCitation");
         System.out.println("citation: " + citation);
-        Assert.assertTrue(citation.contains("A Better Title"));
+        assertTrue(citation.contains("A Better Title"));
 
         // The author tries to update the title while the dataset is in review via native.
         String pathToJsonFile = "doc/sphinx-guides/source/_static/api/dataset-update-metadata.json";
@@ -186,7 +184,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
         String citationAuthorNative = XmlPath.from(atomEntryAuthorNative.body().asString()).getString("bibliographicCitation");
         System.out.println("citation: " + citationAuthorNative);
         // The author was unable to change the title.
-        Assert.assertTrue(citationAuthorNative.contains("A Better Title"));
+        assertTrue(citationAuthorNative.contains("A Better Title"));
 
         // The author remembers she forgot to add a file and tries to upload it while
         // the dataset is in review via native API but this fails.
@@ -239,7 +237,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
                 // because the dataset is still locked when we try to edit it, 
                 // a few lines down. -- L.A. Oct. 2018  
                 // Changes to test for ingest lock and 3 seconds duration SEK 09/2019 #6128
-                assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId, "Ingest", curatorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+                assertTrue(UtilIT.sleepForLock(datasetId, "Ingest", curatorApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
                // Thread.sleep(10000);
             }
         }
@@ -273,7 +271,7 @@ public void testCuratorSendsCommentsToAuthor() throws InterruptedException {
                 .statusCode(OK.getStatusCode());
         String citationCuratorNative = XmlPath.from(atomEntryCuratorNative.body().asString()).getString("bibliographicCitation");
         System.out.println("citation: " + citationCuratorNative);
-        Assert.assertTrue(citationCuratorNative.contains("newTitle"));
+        assertTrue(citationCuratorNative.contains("newTitle"));
         // END https://github.com/IQSS/dataverse/issues/4139
 
         // TODO: test where curator neglecting to leave a comment. Should fail with "reason for return" required.
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java b/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java
index 313c4c2cfaf..2d946b4012b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/IndexIT.java
@@ -1,29 +1,29 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.ArrayList;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.After;
+import org.junit.jupiter.api.AfterEach;
 
 public class IndexIT {
 
     private static final Logger logger = Logger.getLogger(IndexIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
 
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
@@ -100,11 +100,11 @@ public void testIndexStatus() {
       
     }
    
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         }
 
-    @AfterClass
+    @AfterAll
     public static void cleanup() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
index 73a1171bf1b..3d5691dbe03 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
@@ -1,39 +1,40 @@
 package edu.harvard.iq.dataverse.api;
 
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.response.Response;
+import static io.restassured.RestAssured.given;
+
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
 
 public class InfoIT {
 
-    @Test
-    public void testGetDatasetPublishPopupCustomText() {
-
-        given().urlEncodingEnabled(false)
-                .body("Hello world!")
-                .put("/api/admin/settings/"
-                        + SettingsServiceBean.Key.DatasetPublishPopupCustomText);
+    @BeforeAll
+    public static void setUpClass() {
+        UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+        UtilIT.deleteSetting(SettingsServiceBean.Key.DatasetPublishPopupCustomText);
+    }
 
-        Response response = given().urlEncodingEnabled(false)
-                .get("/api/info/settings/" + SettingsServiceBean.Key.DatasetPublishPopupCustomText);
-        response.prettyPrint();
-        response.then().assertThat().statusCode(200)
-                .body("data.message", equalTo("Hello world!"));
+    @AfterAll
+    public static void afterClass() {
+        UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+        UtilIT.deleteSetting(SettingsServiceBean.Key.DatasetPublishPopupCustomText);
+    }
 
-        given().urlEncodingEnabled(false)
-                .delete("/api/admin/settings/"
-                        + SettingsServiceBean.Key.DatasetPublishPopupCustomText);
+    @Test
+    public void testGetDatasetPublishPopupCustomText() {
+        testSettingEndpoint(SettingsServiceBean.Key.DatasetPublishPopupCustomText, "Hello world!");
+    }
 
-        response = given().urlEncodingEnabled(false)
-                .get("/api/info/settings/" + SettingsServiceBean.Key.DatasetPublishPopupCustomText);
-        response.prettyPrint();
-        response.then().assertThat().statusCode(404)
-                .body("message", equalTo("Setting "
-                        + SettingsServiceBean.Key.DatasetPublishPopupCustomText
-                        + " not found"));
+    @Test
+    public void testGetMaxEmbargoDurationInMonths() {
+        testSettingEndpoint(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12");
     }
 
     @Test
@@ -41,7 +42,7 @@ public void testGetVersion() {
         Response response = given().urlEncodingEnabled(false)
                 .get("/api/info/version");
         response.prettyPrint();
-        response.then().assertThat().statusCode(200)
+        response.then().assertThat().statusCode(OK.getStatusCode())
                 .body("data.version", notNullValue());
     }
 
@@ -50,16 +51,49 @@ public void testGetServer() {
         Response response = given().urlEncodingEnabled(false)
                 .get("/api/info/server");
         response.prettyPrint();
-        response.then().assertThat().statusCode(200)
+        response.then().assertThat().statusCode(OK.getStatusCode())
                 .body("data.message", notNullValue());
     }
-    
+
     @Test
-    public void getTermsOfUse() {
+    public void testGetTermsOfUse() {
         Response response = given().urlEncodingEnabled(false)
                 .get("/api/info/apiTermsOfUse");
         response.prettyPrint();
-        response.then().assertThat().statusCode(200)
+        response.then().assertThat().statusCode(OK.getStatusCode())
                 .body("data.message", notNullValue());
     }
+
+    @Test
+    public void testGetAllowsIncompleteMetadata() {
+        Response response = given().urlEncodingEnabled(false)
+                .get("/api/info/settings/incompleteMetadataViaApi");
+        response.prettyPrint();
+        response.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data", notNullValue());
+    }
+
+    @Test
+    public void testGetZipDownloadLimit() {
+        Response response = given().urlEncodingEnabled(false)
+                .get("/api/info/zipDownloadLimit");
+        response.prettyPrint();
+        response.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data", notNullValue());
+    }
+
+    private void testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) {
+        String endpoint =  "/api/info/settings/" + settingKey;
+        // Setting not found
+        Response response = given().urlEncodingEnabled(false).get(endpoint);
+        response.prettyPrint();
+        response.then().assertThat().statusCode(NOT_FOUND.getStatusCode())
+                .body("message", equalTo("Setting " + settingKey + " not found"));
+        // Setting exists
+        UtilIT.setSetting(settingKey, testSettingValue);
+        response = given().urlEncodingEnabled(false).get(endpoint);
+        response.prettyPrint();
+        response.then().assertThat().statusCode(OK.getStatusCode())
+                .body("data.message", equalTo(testSettingValue));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java
index 2fb412ef1cc..2cd7942cb5f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InvalidCharactersIT.java
@@ -1,22 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.Matchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class InvalidCharactersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java
index 88b8a9fc458..1c7e7b05650 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/IpGroupsIT.java
@@ -1,27 +1,27 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class IpGroupsIT {
 
     private static final Logger logger = Logger.getLogger(IpGroupsIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java b/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java
index 72cf6fae875..382a80a3493 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LazyRefTest.java
@@ -3,8 +3,7 @@
  */
 package edu.harvard.iq.dataverse.api;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java
index d6bfdb96777..8d1af322cbd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LicensesIT.java
@@ -1,44 +1,24 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProvider;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GitHubOAuth2AP;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP;
-import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import org.junit.Test;
-import org.junit.BeforeClass;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeAll;
 
-import java.util.Map;
-import java.util.UUID;
 import java.util.logging.Logger;
 
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static org.junit.Assert.*;
-import static org.hamcrest.CoreMatchers.equalTo;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.junit.Assert.assertTrue;
-import org.junit.Ignore;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class LicensesIT {
 
     private static final Logger logger = Logger.getLogger(LicensesIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
index 76e9b7d6bc8..907d3dec4bc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
@@ -1,22 +1,22 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import edu.harvard.iq.dataverse.util.BundleUtil;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class LinkIT {
 
     private static final Logger logger = Logger.getLogger(LinkIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java
new file mode 100644
index 00000000000..53fa500a328
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LogoutIT.java
@@ -0,0 +1,24 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class LogoutIT {
+
+    @BeforeAll
+    public static void setUpClass() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testLogout() {
+        // Test failure because feature flag is turned off
+        Response logoutResponse = UtilIT.logout();
+        assertEquals(INTERNAL_SERVER_ERROR.getStatusCode(), logoutResponse.getStatusCode());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
index c210d471ce2..7a113fd4caa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MakeDataCountApiIT.java
@@ -1,21 +1,21 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
 import org.apache.commons.io.FileUtils;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 public class MakeDataCountApiIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -181,7 +181,7 @@ public void testMakeDataCountGetMetric() throws IOException {
      *
      * update dvobject set authority = '10.7910' where id = 10;
      */
-    @Ignore
+    @Disabled
     @Test
     public void testMakeDataCountDownloadCitation() {
         String idOrPersistentIdOfDataset = "doi:10.7910/DVN/HQZOOB";
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
index 05b7a7910ff..c301e158b4e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
@@ -1,26 +1,60 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
-import static javax.ws.rs.core.Response.Status.OK;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import org.hamcrest.CoreMatchers;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 public class MetadataBlocksIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
     @Test
-    public void testGetCitationBlock() {
+    void testGetCitationBlock() {
         Response getCitationBlock = UtilIT.getMetadataBlock("citation");
         getCitationBlock.prettyPrint();
         getCitationBlock.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data.fields.subject.controlledVocabularyValues[0]", CoreMatchers.is("Agricultural Sciences"));
     }
+    
+    @Test
+    void testDatasetWithAllDefaultMetadata() {
+        // given
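+        // Use assumptions (not hard assertions) for the setup steps so environment problems abort the test instead of failing it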
+        Response createUser = UtilIT.createRandomUser();
+        assumeTrue(createUser.statusCode() < 300,
+            "code=" + createUser.statusCode() +
+            ", response=" + createUser.prettyPrint());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        assumeFalse(apiToken == null || apiToken.isBlank());
+        
+        Response createCollection = UtilIT.createRandomDataverse(apiToken);
+        assumeTrue(createCollection.statusCode() < 300,
+            "code=" + createCollection.statusCode() +
+            ", response=" + createCollection.prettyPrint());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createCollection);
+        assumeFalse(dataverseAlias == null || dataverseAlias.isBlank());
+        
+        // when
+        String pathToJsonFile = "scripts/api/data/dataset-create-new-all-default-fields.json";
+        Response createDataset = UtilIT.createDatasetViaNativeApi(dataverseAlias, pathToJsonFile, apiToken);
+        
+        // then
+        assertEquals(CREATED.getStatusCode(), createDataset.statusCode(),
+           "code=" + createDataset.statusCode() +
+            ", response=" + createDataset.prettyPrint());
+        createDataset.then().assertThat()
+            .body("status", CoreMatchers.equalTo("OK"));
+    }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
index f6478bf379e..e3328eefb4a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetricsIT.java
@@ -1,28 +1,29 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.metrics.MetricsUtil;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.AfterAll;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 //TODO: These tests are fairly flawed as they don't actually add data to compare on.
 //To improve these tests we should try adding data and see if the number DOESN'T
 //go up to show that the caching worked
 public class MetricsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         UtilIT.clearMetricCache();
     }
 
-    @AfterClass
+    @AfterAll
     public static void cleanUpClass() {
         UtilIT.clearMetricCache();
     }
@@ -255,6 +256,12 @@ public void testGetDataverseBySubject() {
         response = UtilIT.metricsDataversesBySubject("dataLocation=local");
         response.then().assertThat()
                 .statusCode(BAD_REQUEST.getStatusCode());
+        
+        // Test cache delete for a single metric - just test that the call succeeds, since
+        // the client can't really tell whether it gets a new or a cached value
+
+        response = UtilIT.clearMetricCache("dataversesBySubject");
+        response.then().assertThat().statusCode(OK.getStatusCode());
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
index dfe23c7d80a..f7135ce7f3b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MoveIT.java
@@ -1,32 +1,31 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import static com.jayway.restassured.path.json.JsonPath.with;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import static io.restassured.path.json.JsonPath.with;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import java.io.StringReader;
 import java.util.List;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.Assert;
-import static org.junit.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class MoveIT {
 
     private static final Logger logger = Logger.getLogger(MoveIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -279,8 +278,8 @@ public void testMoveLinkedDataset() {
                 .body("message", equalTo("Use the query parameter forceMove=true to complete the move. This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. "));
 
         JsonObject linksBeforeData = Json.createReader(new StringReader(getLinksBefore.asString())).readObject();
-        Assert.assertEquals("OK", linksBeforeData.getString("status"));
-        Assert.assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0));
+        assertEquals("OK", linksBeforeData.getString("status"));
+        assertEquals(dataverse2Alias + " (id " + dataverse2Id + ")", linksBeforeData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).getString(0));
 
         boolean forceMove = true;
         Response forceMoveLinkedDataset = UtilIT.moveDataset(datasetId.toString(), dataverse2Alias, forceMove, superuserApiToken);
@@ -301,14 +300,15 @@ public void testMoveLinkedDataset() {
                 .statusCode(OK.getStatusCode())
                 .body("feed.entry[0].id", CoreMatchers.endsWith(datasetPid));
 
+        UtilIT.sleepForReindex(datasetPid, superuserApiToken, 20);
         Response getLinksAfter = UtilIT.getDatasetLinks(datasetPid, superuserApiToken);
         getLinksAfter.prettyPrint();
         getLinksAfter.then().assertThat()
                 .statusCode(OK.getStatusCode());
 
         JsonObject linksAfterData = Json.createReader(new StringReader(getLinksAfter.asString())).readObject();
-        Assert.assertEquals("OK", linksAfterData.getString("status"));
-        Assert.assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size());
+        assertEquals("OK", linksAfterData.getString("status"));
+        assertEquals(0, linksAfterData.getJsonObject("data").getJsonArray("dataverses that link to dataset id " + datasetId).size());
 
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java b/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java
index 9716e7aca13..d4dba236051 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/NetcdfIT.java
@@ -1,24 +1,25 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
+import jakarta.json.Json;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import org.hamcrest.CoreMatchers;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class NetcdfIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
@@ -179,4 +180,48 @@ public void testNmclFromNetcdfErrorChecking() throws IOException {
 
     }
 
+    @Test
+    public void testExtraBoundingBoxFromNetcdf() throws IOException {
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        String username = UtilIT.getUsernameFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        createDataverseResponse.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
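+        // Enable the geospatial block (in addition to citation) so the bounding box extracted from the NetCDF file can be stored as dataset metadata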
+        Response setMetadataBlocks = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("geospatial"), apiToken);
+        setMetadataBlocks.prettyPrint();
+        setMetadataBlocks.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat()
+                .statusCode(CREATED.getStatusCode());
+
+        Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset);
+        String datasetPid = UtilIT.getDatasetPersistentIdFromResponse(createDataset);
+
+        // From https://www.ncei.noaa.gov/data/international-comprehensive-ocean-atmosphere/v3/archive/nrt/ICOADS_R3.0.0_1662-10.nc
+        // via https://data.noaa.gov/onestop/collections/details/9bd5c743-0684-4e70-817a-ed977117f80c?f=temporalResolution:1%20Minute%20-%20%3C%201%20Hour;dataFormats:NETCDF
+        String pathToFile = "src/test/resources/netcdf/ICOADS_R3.0.0_1662-10.nc";
+
+        Response uploadFile = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+        uploadFile.prettyPrint();
+        uploadFile.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response getJson = UtilIT.nativeGet(datasetId, apiToken);
+        getJson.prettyPrint();
+        getJson.then().assertThat()
+                .statusCode(OK.getStatusCode())
+                .body("data.latestVersion.metadataBlocks.geospatial.fields[0].value[0].westLongitude.value", equalTo("-16.320007"))
+                .body("data.latestVersion.metadataBlocks.geospatial.fields[0].value[0].eastLongitude.value", equalTo("-6.220001"))
+                .body("data.latestVersion.metadataBlocks.geospatial.fields[0].value[0].northLongitude.value", equalTo("49.62"))
+                .body("data.latestVersion.metadataBlocks.geospatial.fields[0].value[0].southLongitude.value", equalTo("41.8"));
+    }
+
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java
index 09a14e2d6ad..606bdc6ce5d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/NotificationsIT.java
@@ -1,20 +1,20 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class NotificationsIT {
 
     private static final Logger logger = Logger.getLogger(NotificationsIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java
index c7ca70e0e1d..808346f021c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/PidsIT.java
@@ -1,14 +1,14 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 /**
  * In order to execute this test code you must be configured with DataCite
@@ -16,12 +16,12 @@
  */
 public class PidsIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testGetPid() {
         String pid = "";
@@ -94,7 +94,7 @@ public void testReservePid() {
          */
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testDeletePid() {
         String pid = "";
@@ -112,7 +112,7 @@ public void testDeletePid() {
         deletePid.prettyPrint();
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testCannotPublishUntilReserved() {
         Response createUser = UtilIT.createRandomUser();
@@ -154,7 +154,7 @@ public void testCannotPublishUntilReserved() {
                 .statusCode(FORBIDDEN.getStatusCode());
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testDeleteDraftPidOnDelete() {
         Response createUser = UtilIT.createRandomUser();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java
index 52143eb9981..33323ff4239 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ProvIT.java
@@ -1,33 +1,42 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static junit.framework.Assert.assertEquals;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class ProvIT {
+
+    private static boolean provEnabled = false;
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+        Response provCollectionStatus = UtilIT.getSetting(SettingsServiceBean.Key.ProvCollectionEnabled);
+
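+        // A 200 response means the setting already exists; remember that so tearDownClass only removes it if this test enabled it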
+        provEnabled = provCollectionStatus.getStatusCode() == 200;
+        if (!provEnabled) {
+            UtilIT.enableSetting(SettingsServiceBean.Key.ProvCollectionEnabled);
+        }
     }
 
     
-    @Test 
+    @Test
     public void testFreeformDraftActions() {
+
         Response createDepositor = UtilIT.createRandomUser();
         createDepositor.prettyPrint();
         createDepositor.then().assertThat()
@@ -72,6 +81,7 @@ public void testFreeformDraftActions() {
         JsonObject provFreeFormGood = Json.createObjectBuilder()
                 .add("text", "I inherited this file from my grandfather.")
                 .build();
+        
         Response uploadProvFreeForm = UtilIT.uploadProvFreeForm(dataFileId.toString(), provFreeFormGood, apiTokenForDepositor);
         uploadProvFreeForm.prettyPrint();
         uploadProvFreeForm.then().assertThat()
@@ -81,8 +91,7 @@ public void testFreeformDraftActions() {
         datasetVersions.prettyPrint();
         datasetVersions.then().assertThat()
                 .body("data[0].versionState", equalTo("DRAFT"));
-        
-        
+     
     }
     
     @Test
@@ -197,6 +206,7 @@ public void testAddProvFile() {
                 .body("data.json", notNullValue(String.class));
         assertEquals(200, getProvJson.getStatusCode());
         
+        
         // TODO: Test that if provenance already exists in CPL (e.g. cplId in fileMetadata is not 0) upload returns error.
         //       There are currently no api endpoints to set up up this test.
         
@@ -205,11 +215,13 @@ public void testAddProvFile() {
         deleteProvJson.then().assertThat()
                 .statusCode(FORBIDDEN.getStatusCode()); //cannot delete json of a published dataset
 
-// Command removed, redundant        
-//        Response deleteProvFreeForm = UtilIT.deleteProvFreeForm(dataFileId.toString(), apiTokenForDepositor);
-//        deleteProvFreeForm.prettyPrint();
-//        deleteProvFreeForm.then().assertThat()
-//                .statusCode(OK.getStatusCode());
         
     }
+
+    @AfterAll
+    public static void tearDownClass() {
+        if(!provEnabled){
+            UtilIT.deleteSetting(SettingsServiceBean.Key.ProvCollectionEnabled);
+        }
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java
index ae5bc8b7316..f653b358b33 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/RemoteStoreIT.java
@@ -1,17 +1,17 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class RemoteStoreIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
index d1e2ffb2426..8b5ac917dea 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
@@ -1,14 +1,16 @@
 
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import java.util.logging.Logger;
-import static junit.framework.Assert.assertEquals;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -18,7 +20,7 @@ public class RolesIT {
     
     private static final Logger logger = Logger.getLogger(AdminIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessDirectIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessDirectIT.java
new file mode 100644
index 00000000000..1e44d952af7
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessDirectIT.java
@@ -0,0 +1,97 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import io.restassured.specification.RequestSpecification;
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
+import org.apache.commons.lang3.math.NumberUtils;
+import org.junit.jupiter.api.Test;
+
+public class S3AccessDirectIT {
+
+    @Test
+    public void testS3DirectUpload() {
+        // TODO: remove all these constants
+        RestAssured.baseURI = "https://demo.dataverse.org";
+        String apiToken = "";
+        String datasetPid = "doi:10.70122/FK2/UBWSJU";
+        String datasetId = "2106131";
+        long size = 1000000000L;
+
+        Response getUploadUrls = getUploadUrls(datasetPid, size, apiToken);
+        getUploadUrls.prettyPrint();
+        getUploadUrls.then().assertThat().statusCode(200);
+
+        String url = JsonPath.from(getUploadUrls.asString()).getString("data.url");
+        String partSize = JsonPath.from(getUploadUrls.asString()).getString("data.partSize");
+        String storageIdentifier = JsonPath.from(getUploadUrls.asString()).getString("data.storageIdentifier");
+        System.out.println("url: " + url);
+        System.out.println("partSize: " + partSize);
+        System.out.println("storageIdentifier: " + storageIdentifier);
+
+        System.out.println("uploading file via direct upload");
+        String decodedUrl = null;
+        try {
+            decodedUrl = URLDecoder.decode(url, StandardCharsets.UTF_8.name());
+        } catch (UnsupportedEncodingException ex) {
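+            // Unreachable in practice: UTF-8 is a standard charset that every JVM must support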
+        }
+
+        InputStream inputStream = new ByteArrayInputStream("bumble".getBytes(StandardCharsets.UTF_8));
+        Response uploadFileDirect = uploadFileDirect(decodedUrl, inputStream);
+        uploadFileDirect.prettyPrint();
+        uploadFileDirect.then().assertThat().statusCode(200);
+
+        // TODO: Use MD5 or whatever Dataverse is configured for and
+        // actually calculate it.
+        String jsonData = """
+{
+    "description": "My description.",
+    "directoryLabel": "data/subdir1",
+    "categories": [
+      "Data"
+    ],
+    "restrict": "false",
+    "storageIdentifier": "%s",
+    "fileName": "file1.txt",
+    "mimeType": "text/plain",
+    "checksum": {
+      "@type": "SHA-1",
+      "@value": "123456"
+    }
+}
+""".formatted(storageIdentifier);
+        Response addRemoteFile = UtilIT.addRemoteFile(datasetId, jsonData, apiToken);
+        addRemoteFile.prettyPrint();
+        addRemoteFile.then().assertThat()
+                .statusCode(200);
+    }
+
+    static Response getUploadUrls(String idOrPersistentIdOfDataset, long sizeInBytes, String apiToken) {
+        String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
+        String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
+            idInPath = ":persistentId";
+            optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
+        }
+        RequestSpecification requestSpecification = given();
+        if (apiToken != null) {
+            requestSpecification = given()
+                    .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken);
+        }
+        return requestSpecification.get("/api/datasets/" + idInPath + "/uploadurls?size=" + sizeInBytes + optionalQueryParam);
+    }
+
+    static Response uploadFileDirect(String url, InputStream inputStream) {
+        return given()
+                .header("x-amz-tagging", "dv-state=temp")
+                .body(inputStream)
+                .put(url);
+    }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java
index 29cb6895bba..74150ca120a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/S3AccessIT.java
@@ -1,71 +1,396 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
+import com.amazonaws.regions.Regions;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import com.amazonaws.services.s3.model.Bucket;
+import com.amazonaws.services.s3.model.HeadBucketRequest;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.http.Header;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import io.restassured.specification.RequestSpecification;
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.OK;
-import static junit.framework.Assert.assertEquals;
+import org.apache.commons.lang3.math.NumberUtils;
 import static org.hamcrest.CoreMatchers.equalTo;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.Matchers.startsWith;
-import org.junit.After;
-import org.junit.Assert;
+import org.junit.jupiter.api.Assertions;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
- *  NOTE: This test WILL NOT pass if your installation is not configured for Amazon S3 storage.
- *  For S3 storage, you must set two jvm options: storage-driver-id and s3-bucket-name
- *  Refer to the guides or to https://github.com/IQSS/dataverse/issues/3921#issuecomment-319973245
- * @author bsilverstein
+ * This test requires LocalStack and MinIO to be running. Developers can use our
+ * docker-compose file, which has all the necessary configuration.
  */
 public class S3AccessIT {
-    
+
     private static final Logger logger = Logger.getLogger(S3AccessIT.class.getCanonicalName());
 
-    @BeforeClass
+    static final String BUCKET_NAME = "mybucket";
+    static AmazonS3 s3localstack = null;
+    static AmazonS3 s3minio = null;
+
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
-        
+
+        // At least when spun up by our docker-compose file, the creds don't matter for LocalStack.
+        String accessKeyLocalStack = "whatever";
+        String secretKeyLocalStack = "not used";
+
+        s3localstack = AmazonS3ClientBuilder.standard()
+                .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyLocalStack, secretKeyLocalStack)))
+                .withEndpointConfiguration(new EndpointConfiguration("s3.localhost.localstack.cloud:4566", Regions.US_EAST_2.getName())).build();
+
+        String accessKeyMinio = "4cc355_k3y";
+        String secretKeyMinio = "s3cr3t_4cc355_k3y";
+        s3minio = AmazonS3ClientBuilder.standard()
+                // https://stackoverflow.com/questions/72205086/amazonss3client-throws-unknownhostexception-if-attempting-to-connect-to-a-local
+                .withPathStyleAccessEnabled(Boolean.TRUE)
+                .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKeyMinio, secretKeyMinio)))
+                .withEndpointConfiguration(new EndpointConfiguration("http://localhost:9000", Regions.US_EAST_1.getName())).build();
+
+//        System.out.println("buckets on LocalStack before attempting to create " + BUCKET_NAME);
+//        for (Bucket bucket : s3localstack.listBuckets()) {
+//            System.out.println("bucket: " + bucket);
+//        }
+//
+//        System.out.println("buckets on MinIO before attempting to create " + BUCKET_NAME);
+//        for (Bucket bucket : s3minio.listBuckets()) {
+//            System.out.println("bucket: " + bucket);
+//        }
+        // create bucket if it doesn't exist
+        // Note that we create the localstack bucket with conf/localstack/buckets.sh
+        // because we haven't figured out how to create it properly in Java.
+        // Perhaps it is missing ACLs.
+        try {
+            s3localstack.headBucket(new HeadBucketRequest(BUCKET_NAME));
+        } catch (AmazonS3Exception ex) {
+            s3localstack.createBucket(BUCKET_NAME);
+        }
+
+        try {
+            s3minio.headBucket(new HeadBucketRequest(BUCKET_NAME));
+        } catch (AmazonS3Exception ex) {
+            s3minio.createBucket(BUCKET_NAME);
+        }
+
     }
-    
+
+    /**
+     * We're using MinIO for testing non-direct upload.
+     */
     @Test
-    public void testAddDataFileS3Prefix() {
+    public void testNonDirectUpload() {
+        String driverId = "minio1";
+        String driverLabel = "MinIO";
+
+        Response createSuperuser = UtilIT.createRandomUser();
+        createSuperuser.then().assertThat().statusCode(200);
+        String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser);
+        String superusername = UtilIT.getUsernameFromResponse(createSuperuser);
+        UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200);
+        Response storageDrivers = UtilIT.listStorageDrivers(superuserApiToken);
+        storageDrivers.prettyPrint();
+        // TODO where is "Local/local" coming from?
+        String drivers = """
+{
+    "status": "OK",
+    "data": {
+        "LocalStack": "localstack1",
+        "MinIO": "minio1",
+        "Local": "local",
+        "Filesystem": "file1"
+    }
+}""";
+
         //create user who will make a dataverse/dataset
         Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(200);
         String username = UtilIT.getUsernameFromResponse(createUser);
         String apiToken = UtilIT.getApiTokenFromResponse(createUser);
-        
+
         Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
         String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
-        
+
+        Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken);
+        originalStorageDriver.prettyPrint();
+        originalStorageDriver.then().assertThat()
+                .body("data.message", equalTo("undefined"))
+                .statusCode(200);
+
+        Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken);
+        setStorageDriverToS3.prettyPrint();
+        setStorageDriverToS3.then().assertThat()
+                .statusCode(200);
+
+        Response updatedStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken);
+        updatedStorageDriver.prettyPrint();
+        updatedStorageDriver.then().assertThat()
+                .statusCode(200);
+
         Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
-        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
         createDatasetResponse.prettyPrint();
-        
-        //upload a tabular file via native, check storage id prefix for s3
+        createDatasetResponse.then().assertThat().statusCode(201);
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+        String datasetPid = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
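+        // datasetPid looks like "doi:10.5072/FK2/XXXXXX"; dropping the 4-character "doi:" prefix gives the prefix used for the dataset's S3 keys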
+        String datasetStorageIdentifier = datasetPid.substring(4);
+
+        Response getDatasetMetadata = UtilIT.nativeGet(datasetId, apiToken);
+        getDatasetMetadata.prettyPrint();
+        getDatasetMetadata.then().assertThat().statusCode(200);
+
+        //upload a tabular file via native, check storage id prefix for driverId
         String pathToFile = "scripts/search/data/tabular/1char";
         Response addFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
         addFileResponse.prettyPrint();
         addFileResponse.then().assertThat()
-                .body("data.files[0].dataFile.storageIdentifier", startsWith("s3://"));
-        
-        //clean up test dvobjects and user
-        Response deleteDataset = UtilIT.deleteDatasetViaNativeApi(datasetId, apiToken);
-        deleteDataset.prettyPrint();
-        deleteDataset.then().assertThat()
+                .statusCode(200)
+                .body("data.files[0].dataFile.storageIdentifier", startsWith(driverId + "://"));
+
+        String fileId = JsonPath.from(addFileResponse.body().asString()).getString("data.files[0].dataFile.id");
+
+        Response getfileMetadata = UtilIT.getFileData(fileId, apiToken);
+        getfileMetadata.prettyPrint();
+        getfileMetadata.then().assertThat().statusCode(200);
+
+        String storageIdentifier = JsonPath.from(addFileResponse.body().asString()).getString("data.files[0].dataFile.storageIdentifier");
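+        // storageIdentifier has the form "minio1://mybucket:<key>"; the third ":"-separated token is the object key in the bucket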
+        String keyInDataverse = storageIdentifier.split(":")[2];
+        Assertions.assertEquals(driverId + "://" + BUCKET_NAME + ":" + keyInDataverse, storageIdentifier);
+
+        String keyInS3 = datasetStorageIdentifier + "/" + keyInDataverse;
+        String s3Object = s3minio.getObjectAsString(BUCKET_NAME, keyInS3);
+        System.out.println("s3Object: " + s3Object);
+
+        // The file uploaded above only contains the character "a".
+        assertEquals("a".trim(), s3Object.trim());
+
+        System.out.println("non-direct download...");
+        Response downloadFile = UtilIT.downloadFile(Integer.valueOf(fileId), apiToken);
+        downloadFile.then().assertThat().statusCode(200);
+
+        String contentsOfDownloadedFile = downloadFile.getBody().asString();
+        assertEquals("a\n", contentsOfDownloadedFile);
+
+        Response deleteFile = UtilIT.deleteFileApi(Integer.parseInt(fileId), apiToken);
+        deleteFile.prettyPrint();
+        deleteFile.then().assertThat().statusCode(200);
+
+        AmazonS3Exception expectedException = null;
+        try {
+            s3minio.getObjectAsString(BUCKET_NAME, keyInS3);
+        } catch (AmazonS3Exception ex) {
+            expectedException = ex;
+        }
+        assertNotNull(expectedException);
+        // 404 because the file has been successfully deleted
+        assertEquals(404, expectedException.getStatusCode());
+
+    }
+
+    /**
+     * We use LocalStack to test direct upload.
+     */
+    @Test
+    public void testDirectUpload() {
+        String driverId = "localstack1";
+        String driverLabel = "LocalStack";
+        Response createSuperuser = UtilIT.createRandomUser();
+        createSuperuser.then().assertThat().statusCode(200);
+        String superuserApiToken = UtilIT.getApiTokenFromResponse(createSuperuser);
+        String superusername = UtilIT.getUsernameFromResponse(createSuperuser);
+        UtilIT.makeSuperUser(superusername).then().assertThat().statusCode(200);
+        Response storageDrivers = UtilIT.listStorageDrivers(superuserApiToken);
+        storageDrivers.prettyPrint();
+        // TODO where is "Local/local" coming from?
+        String drivers = """
+{
+    "status": "OK",
+    "data": {
+        "LocalStack": "localstack1",
+        "MinIO": "minio1",
+        "Local": "local",
+        "Filesystem": "file1"
+    }
+}""";
+
+        //create user who will make a dataverse/dataset
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(200);
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        createDataverseResponse.prettyPrint();
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+
+        Response originalStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken);
+        originalStorageDriver.prettyPrint();
+        originalStorageDriver.then().assertThat()
+                .body("data.message", equalTo("undefined"))
                 .statusCode(200);
 
-        Response deleteDataverse = UtilIT.deleteDataverse(dataverseAlias, apiToken);
-        deleteDataverse.prettyPrint();
-        deleteDataverse.then().assertThat()
+        Response setStorageDriverToS3 = UtilIT.setStorageDriver(dataverseAlias, driverLabel, superuserApiToken);
+        setStorageDriverToS3.prettyPrint();
+        setStorageDriverToS3.then().assertThat()
                 .statusCode(200);
-        
-        Response deleteUser = UtilIT.deleteUser(username);
-        deleteUser.prettyPrint();
-        deleteUser.then().assertThat()
+
+        Response updatedStorageDriver = UtilIT.getStorageDriver(dataverseAlias, superuserApiToken);
+        updatedStorageDriver.prettyPrint();
+        updatedStorageDriver.then().assertThat()
                 .statusCode(200);
+
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDatasetResponse.prettyPrint();
+        createDatasetResponse.then().assertThat().statusCode(201);
+        Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
+        String datasetPid = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId");
+        String datasetStorageIdentifier = datasetPid.substring(4);
+
+        Response getDatasetMetadata = UtilIT.nativeGet(datasetId, apiToken);
+        getDatasetMetadata.prettyPrint();
+        getDatasetMetadata.then().assertThat().statusCode(200);
+
+//        //upload a tabular file via native, check storage id prefix for driverId
+//        String pathToFile = "scripts/search/data/tabular/1char";
+//        Response addFileResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken);
+//        addFileResponse.prettyPrint();
+//        addFileResponse.then().assertThat()
+//                .statusCode(200)
+//                .body("data.files[0].dataFile.storageIdentifier", startsWith(driverId + "://"));
+//
+//        String fileId = JsonPath.from(addFileResponse.body().asString()).getString("data.files[0].dataFile.id");
+        long size = 1000000000L;
+        Response getUploadUrls = UtilIT.getUploadUrls(datasetPid, size, apiToken);
+        getUploadUrls.prettyPrint();
+        getUploadUrls.then().assertThat().statusCode(200);
+
+        String url = JsonPath.from(getUploadUrls.asString()).getString("data.url");
+        String partSize = JsonPath.from(getUploadUrls.asString()).getString("data.partSize");
+        String storageIdentifier = JsonPath.from(getUploadUrls.asString()).getString("data.storageIdentifier");
+        System.out.println("url: " + url);
+        System.out.println("partSize: " + partSize);
+        System.out.println("storageIdentifier: " + storageIdentifier);
+
+        System.out.println("uploading file via direct upload");
+        String decodedUrl = null;
+        try {
+            decodedUrl = URLDecoder.decode(url, StandardCharsets.UTF_8.name());
+        } catch (UnsupportedEncodingException ex) {
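+            // UTF-8 is guaranteed to be supported by the JVM, so this exception should never occur in practice.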
+        }
+
+        // change to localhost because LocalStack is running in a container locally
+        String localhostUrl = decodedUrl.replace("http://localstack", "http://localhost");
+        String contentsOfFile = "foobar";
+
+        InputStream inputStream = new ByteArrayInputStream(contentsOfFile.getBytes(StandardCharsets.UTF_8));
+        Response uploadFileDirect = UtilIT.uploadFileDirect(localhostUrl, inputStream);
+        uploadFileDirect.prettyPrint();
+        /*
+        Direct upload to MinIO is failing with errors like this:
+        <Error>
+          <Code>SignatureDoesNotMatch</Code>
+          <Message>The request signature we calculated does not match the signature you provided. Check your key and signing method.</Message>
+          <Key>10.5072/FK2/KGFCEJ/18b8c06688c-21b8320a3ee5</Key>
+          <BucketName>mybucket</BucketName>
+          <Resource>/mybucket/10.5072/FK2/KGFCEJ/18b8c06688c-21b8320a3ee5</Resource>
+          <RequestId>1793915CCC5BC95C</RequestId>
+          <HostId>dd9025bab4ad464b049177c95eb6ebf374d3b3fd1af9251148b658df7ac2e3e8</HostId>
+        </Error>
+         */
+        uploadFileDirect.then().assertThat().statusCode(200);
+
+        // TODO: Use MD5 or whatever Dataverse is configured for and
+        // actually calculate it.
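+        // A possible sketch for calculating the real SHA-1 of the uploaded bytes with
+        // plain JDK classes (kept commented out; the hardcoded "123456" below is still
+        // what the test sends):
+        //   java.security.MessageDigest md = java.security.MessageDigest.getInstance("SHA-1");
+        //   StringBuilder sha1Hex = new StringBuilder();
+        //   for (byte b : md.digest(contentsOfFile.getBytes(StandardCharsets.UTF_8))) {
+        //       sha1Hex.append(String.format("%02x", b));
+        //   }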
+        String jsonData = """
+{
+    "description": "My description.",
+    "directoryLabel": "data/subdir1",
+    "categories": [
+      "Data"
+    ],
+    "restrict": "false",
+    "storageIdentifier": "%s",
+    "fileName": "file1.txt",
+    "mimeType": "text/plain",
+    "checksum": {
+      "@type": "SHA-1",
+      "@value": "123456"
+    }
+}
+""".formatted(storageIdentifier);
+
+        // "There was an error when trying to add the new file. File size must be explicitly specified when creating DataFiles with Direct Upload"
+        Response addRemoteFile = UtilIT.addRemoteFile(datasetId.toString(), jsonData, apiToken);
+        addRemoteFile.prettyPrint();
+        addRemoteFile.then().assertThat()
+                .statusCode(200);
+
+        String fileId = JsonPath.from(addRemoteFile.asString()).getString("data.files[0].dataFile.id");
+        Response getfileMetadata = UtilIT.getFileData(fileId, apiToken);
+        getfileMetadata.prettyPrint();
+        getfileMetadata.then().assertThat().statusCode(200);
+
+//        String storageIdentifier = JsonPath.from(addFileResponse.body().asString()).getString("data.files[0].dataFile.storageIdentifier");
+        String keyInDataverse = storageIdentifier.split(":")[2];
+        Assertions.assertEquals(driverId + "://" + BUCKET_NAME + ":" + keyInDataverse, storageIdentifier);
+
+        String keyInS3 = datasetStorageIdentifier + "/" + keyInDataverse;
+        String s3Object = s3localstack.getObjectAsString(BUCKET_NAME, keyInS3);
+        System.out.println("s3Object: " + s3Object);
+
+//        assertEquals(contentsOfFile.trim(), s3Object.trim());
+        assertEquals(contentsOfFile, s3Object);
+
+        System.out.println("direct download...");
+        Response getHeaders = UtilIT.downloadFileNoRedirect(Integer.valueOf(fileId), apiToken);
+        for (Header header : getHeaders.getHeaders()) {
+            System.out.println("direct download header: " + header);
+        }
+        getHeaders.then().assertThat().statusCode(303);
+
+        String urlFromResponse = getHeaders.getHeader("Location");
+        String localhostDownloadUrl = urlFromResponse.replace("localstack", "localhost");
+        String decodedDownloadUrl = null;
+        try {
+            decodedDownloadUrl = URLDecoder.decode(localhostDownloadUrl, StandardCharsets.UTF_8.name());
+        } catch (UnsupportedEncodingException ex) {
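+            // As above, UTF-8 is always available, so this exception is not expected.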
+        }
+
+        Response downloadFile = UtilIT.downloadFromUrl(decodedDownloadUrl);
+        downloadFile.prettyPrint();
+        downloadFile.then().assertThat().statusCode(200);
+
+        String contentsOfDownloadedFile = downloadFile.getBody().asString();
+        assertEquals(contentsOfFile, contentsOfDownloadedFile);
+
+        Response deleteFile = UtilIT.deleteFileApi(Integer.parseInt(fileId), apiToken);
+        deleteFile.prettyPrint();
+        deleteFile.then().assertThat().statusCode(200);
+
+        AmazonS3Exception expectedException = null;
+        try {
+            s3localstack.getObjectAsString(BUCKET_NAME, keyInS3);
+        } catch (AmazonS3Exception ex) {
+            expectedException = ex;
+        }
+        assertNotNull(expectedException);
+        // 404 because the file has been successfully deleted
+        assertEquals(404, expectedException.getStatusCode());
+
     }
+
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index 61a55a88a3b..125753296a2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -1,49 +1,49 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.AfterEach;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.io.File;
 import java.io.InputStream;
 import java.io.UnsupportedEncodingException;
 import java.util.Base64;
-import javax.json.JsonArray;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import jakarta.json.JsonArray;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
 import org.hamcrest.CoreMatchers;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import java.awt.image.BufferedImage;
 import java.io.IOException;
-import static junit.framework.Assert.assertEquals;
 import static java.lang.Thread.sleep;
 import javax.imageio.ImageIO;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
 import org.hamcrest.Matchers;
-import org.junit.After;
-import static org.junit.Assert.assertNotEquals;
-import static java.lang.Thread.sleep;
-import javax.json.JsonObjectBuilder;
+
+import jakarta.json.JsonObjectBuilder;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class SearchIT {
 
     private static final Logger logger = Logger.getLogger(SearchIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
 
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
@@ -105,7 +105,7 @@ public void testSearchPermisions() throws InterruptedException {
         assertEquals(200, grantUser2AccessOnDataset.getStatusCode());
 
         String searchPart = "id:dataset_" + datasetId1 + "_draft";        
-        assertTrue("Failed test if search exceeds max duration " + searchPart , UtilIT.sleepForSearch(searchPart, apiToken2, "", UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); 
+        assertTrue(UtilIT.sleepForSearch(searchPart, apiToken2, "", UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
         
         Response shouldBeVisibleToUser2 = UtilIT.search("id:dataset_" + datasetId1 + "_draft", apiToken2);
         shouldBeVisibleToUser2.prettyPrint();
@@ -155,7 +155,7 @@ public void testSearchPermisions() throws InterruptedException {
         Response dataverse47behaviorOfTokensBeingRequired = UtilIT.search("id:dataset_" + datasetId1, nullToken);
         dataverse47behaviorOfTokensBeingRequired.prettyPrint();
         dataverse47behaviorOfTokensBeingRequired.then().assertThat()
-                .body("message", CoreMatchers.equalTo("Please provide a key query parameter (?key=XXX) or via the HTTP header X-Dataverse-key"))
+                .body("message", CoreMatchers.equalTo(AbstractApiBean.RESPONSE_MESSAGE_AUTHENTICATED_USER_REQUIRED))
                 .statusCode(UNAUTHORIZED.getStatusCode());
 
         Response reEnableTokenlessSearch = UtilIT.deleteSetting(SettingsServiceBean.Key.SearchApiRequiresToken);
@@ -311,7 +311,7 @@ public void testSearchDynamicMetadataFields() {
         allFieldsFromCitation.then().assertThat()
                 .body("data.items[0].metadataBlocks.citation.displayName", CoreMatchers.equalTo("Citation Metadata"))
                 // Many fields returned, all of the citation block that has been filled in.
-                .body("data.items[0].metadataBlocks.citation.fields.typeName.size", Matchers.equalTo(5))
+                .body("data.items[0].metadataBlocks.citation.fields", Matchers.hasSize(5))
                 .statusCode(OK.getStatusCode());
 
     }
@@ -747,6 +747,7 @@ public void testIdentifier() {
         System.out.println("identifier: " + identifier);
 
         String searchPart = identifier.replace("FK2/", "");
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchUnpublished = UtilIT.search(searchPart, apiToken);
         searchUnpublished.prettyPrint();
         searchUnpublished.then().assertThat()
@@ -762,6 +763,7 @@ public void testIdentifier() {
                 .statusCode(OK.getStatusCode());
 
         searchPart = identifier.replace("FK2/", "");
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchTargeted = UtilIT.search("dsPersistentId:" + searchPart, apiToken);
         searchTargeted.prettyPrint();
         searchTargeted.then().assertThat()
@@ -812,7 +814,7 @@ public void testNestedSubtree() {
                 .statusCode(OK.getStatusCode());
 
         try {
-            Thread.sleep(2000);
+            Thread.sleep(4000);
         } catch (InterruptedException ex) {
             /**
              * This sleep is here because dataverseAlias2 is showing with
@@ -913,7 +915,7 @@ public void testCuratorCardDataversePopulation() throws InterruptedException {
         String searchPart = "*"; 
         
         Response searchPublishedSubtreeSuper = UtilIT.search(searchPart, apiTokenSuper, "&subtree="+parentDataverseAlias);
-        assertTrue("Failed test if search exceeds max duration " + searchPart , UtilIT.sleepForSearch(searchPart, apiToken, "&subtree="+parentDataverseAlias, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); 
+        assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree="+parentDataverseAlias, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
         searchPublishedSubtreeSuper.prettyPrint();
         searchPublishedSubtreeSuper.then().assertThat()
                 .statusCode(OK.getStatusCode())
@@ -964,6 +966,9 @@ public void testSubtreePermissions() {
         Response datasetAsJson2 = UtilIT.nativeGet(datasetId2, apiToken);
         datasetAsJson2.then().assertThat()
                 .statusCode(OK.getStatusCode());
+        
+        // Wait a little while for the index to pick up the datasets; otherwise there can be a timing issue when searching for them.
+        UtilIT.sleepForReindex(datasetId2.toString(), apiToken, 2);
 
         String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier");
         String identifier2 = JsonPath.from(datasetAsJson2.getBody().asString()).getString("data.identifier"); 
@@ -973,12 +978,12 @@ public void testSubtreePermissions() {
         Response searchFakeSubtree = UtilIT.search(searchPart, apiToken, "&subtree=fake");
         searchFakeSubtree.prettyPrint();
         searchFakeSubtree.then().assertThat()
-                .statusCode(400);
+                .statusCode(BAD_REQUEST.getStatusCode());
         
         Response searchFakeSubtreeNoAPI = UtilIT.search(searchPart, null, "&subtree=fake");
         searchFakeSubtreeNoAPI.prettyPrint();
         searchFakeSubtreeNoAPI.then().assertThat()
-                .statusCode(400);
+                .statusCode(BAD_REQUEST.getStatusCode());
 
         Response searchUnpublishedSubtree = UtilIT.search(searchPart, apiToken, "&subtree="+dataverseAlias);
         searchUnpublishedSubtree.prettyPrint();
@@ -1006,25 +1011,29 @@ public void testSubtreePermissions() {
                 // TODO: investigate if this is a bug that nothing was found.
                 .body("data.total_count", CoreMatchers.equalTo(0));
 
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchUnpublishedRootSubtreeForDataset = UtilIT.search(identifier.replace("FK2/", ""), apiToken, "&subtree=root");
         searchUnpublishedRootSubtreeForDataset.prettyPrint();
         searchUnpublishedRootSubtreeForDataset.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data.total_count", CoreMatchers.equalTo(1));
 
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchUnpublishedRootSubtreeForDatasetNoAPI = UtilIT.search(identifier.replace("FK2/", ""), null, "&subtree=root");
         searchUnpublishedRootSubtreeForDatasetNoAPI.prettyPrint();
         searchUnpublishedRootSubtreeForDatasetNoAPI.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 // TODO: investigate if this is a bug that nothing was found.
                 .body("data.total_count", CoreMatchers.equalTo(0));
-        
+
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchUnpublishedNoSubtreeForDataset = UtilIT.search(identifier.replace("FK2/", ""), apiToken, "");
         searchUnpublishedNoSubtreeForDataset.prettyPrint();
         searchUnpublishedNoSubtreeForDataset.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data.total_count", CoreMatchers.equalTo(1));
         
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchUnpublishedNoSubtreeForDatasetNoAPI = UtilIT.search(identifier.replace("FK2/", ""), null, "");
         searchUnpublishedNoSubtreeForDatasetNoAPI.prettyPrint();
         searchUnpublishedNoSubtreeForDatasetNoAPI.then().assertThat()
@@ -1074,12 +1083,14 @@ public void testSubtreePermissions() {
                 .statusCode(OK.getStatusCode())
                 .body("data.total_count", CoreMatchers.equalTo(2));
         
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchPublishedRootSubtreeForDataset = UtilIT.search(identifier.replace("FK2/", ""), apiToken, "&subtree=root");
         searchPublishedRootSubtreeForDataset.prettyPrint();
         searchPublishedRootSubtreeForDataset.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data.total_count", CoreMatchers.equalTo(1));
-        
+
+        UtilIT.sleepForReindex(String.valueOf(datasetId), apiToken, 5);
         Response searchPublishedRootSubtreeForDatasetNoAPI = UtilIT.search(identifier.replace("FK2/", ""), null, "&subtree=root");
         searchPublishedRootSubtreeForDatasetNoAPI.prettyPrint();
         searchPublishedRootSubtreeForDatasetNoAPI.then().assertThat()
@@ -1274,7 +1285,7 @@ public void testGeospatialSearchInvalid() {
 
     }
 
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         File treesThumb = new File("scripts/search/data/binary/trees.png.thumb48");
         treesThumb.delete();
@@ -1284,7 +1295,7 @@ public void tearDownDataverse() {
         dataverseprojectThumb.delete();
     }
 
-    @AfterClass
+    @AfterAll
     public static void cleanup() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java
new file mode 100644
index 00000000000..dd5ddbfa7d8
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SignedUrlAuthMechanismIT.java
@@ -0,0 +1,46 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static io.restassured.RestAssured.get;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class SignedUrlAuthMechanismIT {
+
+    @BeforeAll
+    public static void setUp() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testSignedUrlAuthMechanism() {
+        // Test user setup
+        Response createUserResponse = UtilIT.createRandomUser();
+        String username = UtilIT.getUsernameFromResponse(createUserResponse);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUserResponse);
+        UtilIT.makeSuperUser(username);
+
+        // Test dataset setup
+        Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken);
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse);
+        Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        String datasetPersistentId = UtilIT.getDatasetPersistentIdFromResponse(createDatasetResponse);
+
+        // Valid Signed URL behavior
+        String apiPath = String.format("/api/v1/datasets/:persistentId/?persistentId=%s", datasetPersistentId);
+        Response createSignedUrlResponse = UtilIT.createSignedUrl(apiToken, apiPath, username);
+        String signedUrl = UtilIT.getSignedUrlFromResponse(createSignedUrlResponse);
+        Response signedUrlResponse = get(signedUrl);
+        assertEquals(OK.getStatusCode(), signedUrlResponse.getStatusCode());
+
+        // Invalid Signed URL behavior
+        String invalidSignedUrlPath = String.format("/api/v1/datasets/:persistentId/?persistentId=%s&until=2999-01-01T23:59:29.855&user=dataverseAdmin&method=GET&token=invalidToken", datasetPersistentId);
+        Response invalidSignedUrlResponse = get(invalidSignedUrlPath);
+        assertEquals(UNAUTHORIZED.getStatusCode(), invalidSignedUrlResponse.getStatusCode());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
new file mode 100644
index 00000000000..75f514f3398
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java
@@ -0,0 +1,117 @@
+package edu.harvard.iq.dataverse.api;
+
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+
+import static io.restassured.RestAssured.given;
+import io.restassured.response.Response;
+
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import jakarta.json.JsonObject;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+public class SignpostingIT {
+
+    @BeforeAll
+    public static void setUpClass() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testSignposting() {
+
+        Response createUser = UtilIT.createRandomUser();
+        createUser.then().assertThat().statusCode(OK.getStatusCode());
+        String username = UtilIT.getUsernameFromResponse(createUser);
+        String apiToken = UtilIT.getApiTokenFromResponse(createUser);
+        Response toggleSuperuser = UtilIT.makeSuperUser(username);
+        toggleSuperuser.then().assertThat().statusCode(OK.getStatusCode());
+
+        Response createDataverse = UtilIT.createRandomDataverse(apiToken);
+        createDataverse.then().assertThat().statusCode(CREATED.getStatusCode());
+        String dataverseAlias = UtilIT.getAliasFromResponse(createDataverse);
+        Integer dataverseId = UtilIT.getDataverseIdFromResponse(createDataverse);
+
+        Response createDataset = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken);
+        createDataset.prettyPrint();
+        createDataset.then().assertThat().statusCode(CREATED.getStatusCode());
+
+        String datasetPid = UtilIT.getDatasetPersistentIdFromResponse(createDataset);
+
+        Response publishDataverse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken);
+        publishDataverse.then().assertThat().statusCode(OK.getStatusCode());
+        Response publishDataset = UtilIT.publishDatasetViaNativeApi(datasetPid, "major", apiToken);
+        publishDataset.then().assertThat().statusCode(OK.getStatusCode());
+
+        String datasetLandingPage = RestAssured.baseURI + "/dataset.xhtml?persistentId=" + datasetPid;
+        System.out.println("Checking dataset landing page for Signposting: " + datasetLandingPage);
+        Response getHtml = given().get(datasetLandingPage);
+
+        System.out.println("Link header: " + getHtml.getHeader("Link"));
+
+        getHtml.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Make sure there's Signposting stuff in the "Link" header such as
+        // the dataset PID, cite-as, etc.
+        String linkHeader = getHtml.getHeader("Link");
+        assertTrue(linkHeader.contains(datasetPid));
+        assertTrue(linkHeader.contains("cite-as"));
+        assertTrue(linkHeader.contains("describedby"));
+
+        Response headHtml = given().head(datasetLandingPage);
+
+        System.out.println("Link header: " + headHtml.getHeader("Link"));
+
+        headHtml.then().assertThat().statusCode(OK.getStatusCode());
+
+        // Make sure there's Signposting stuff in the "Link" header such as
+        // the dataset PID, cite-as, etc.
+        linkHeader = headHtml.getHeader("Link");
+        assertTrue(linkHeader.contains(datasetPid));
+        assertTrue(linkHeader.contains("cite-as"));
+        assertTrue(linkHeader.contains("describedby"));
+        assertTrue(linkHeader.contains("<http://creativecommons.org/publicdomain/zero/1.0>;rel=\"license\""));
+
+        Pattern pattern = Pattern.compile("<([^<]*)> ; rel=\"linkset\";type=\"application\\/linkset\\+json\"");
+        Matcher matcher = pattern.matcher(linkHeader);
+        matcher.find();
+        String linksetUrl = matcher.group(1);
+
+        System.out.println("Linkset URL: " + linksetUrl);
+
+        Response linksetResponse = given().accept(ContentType.JSON).get(linksetUrl);
+
+        String responseString = linksetResponse.getBody().asString();
+
+        JsonObject data = JsonUtil.getJsonObject(responseString);
+        JsonObject lso = data.getJsonArray("linkset").getJsonObject(0);
+        System.out.println("Linkset: " + lso.toString());
+
+        linksetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+        assertTrue(lso.getString("anchor").indexOf("/dataset.xhtml?persistentId=" + datasetPid) > 0);
+        assertTrue(lso.containsKey("describedby"));
+
+        // Test export URL from link header
+        // regex inspired by https://stackoverflow.com/questions/68860255/how-to-match-the-closest-opening-and-closing-brackets
+        Pattern exporterPattern = Pattern.compile("[<\\[][^()\\[\\]]*?exporter=schema.org[^()\\[\\]]*[>\\]]");
+        Matcher exporterMatcher = exporterPattern.matcher(linkHeader);
+        exporterMatcher.find();
+
+        Response exportDataset = UtilIT.exportDataset(datasetPid, "schema.org");
+        exportDataset.prettyPrint();
+        exportDataset.then().assertThat().statusCode(OK.getStatusCode());
+
+    }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java
index 723f05d3802..1b9025cab82 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SiteMapIT.java
@@ -1,13 +1,13 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import io.restassured.response.Response;
 
 public class SiteMapIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java
index a33d7d60263..89208997ee3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/StorageSitesIT.java
@@ -1,17 +1,17 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class StorageSitesIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
index 6c745790359..39156f1c59b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java
@@ -1,9 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import edu.harvard.iq.dataverse.GlobalId;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.api.datadeposit.SwordConfigurationImpl;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.io.File;
@@ -13,27 +12,28 @@
 import java.util.List;
 import java.util.Map;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.FORBIDDEN;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static javax.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.NO_CONTENT;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static jakarta.ws.rs.core.Response.Status.METHOD_NOT_ALLOWED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.NO_CONTENT;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.startsWith;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 /**
  * In all these tests you should never see something like "[long string exposing
@@ -55,7 +55,7 @@ public class SwordIT {
     private static final String rootDvNotPublished = "Many of these SWORD tests require that the root dataverse collection has been published. Publish the root dataverse and then re-run these tests.";
     private static final String rootDvLackPermissions = "Many of these SWORD tests require you set permissions for the root dataverse collection: \"Anyone with a Dataverse account can add sub dataverses and datasets\" + curator role for new datasets. Please set and re-run these tests.";
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
         boolean testAgainstDev1 = false;
@@ -73,7 +73,7 @@ public static void setUpClass() {
         Response checkRootDataverse = UtilIT.listDatasetsViaSword(rootDataverseAlias, apitoken);
         //checkRootDataverse.prettyPrint();
         checkRootDataverse.then().assertThat().statusCode(OK.getStatusCode());
-        assumeTrue(rootDvNotPublished,  checkRootDataverse.getBody().xmlPath().getBoolean("feed.dataverseHasBeenReleased"));
+        assumeTrue(checkRootDataverse.getBody().xmlPath().getBoolean("feed.dataverseHasBeenReleased"), rootDvNotPublished);
         
         // check that root dataverse has permissions for any user set to dataverse + dataset creator (not admin, not curator!)
         checkRootDataverse = UtilIT.getRoleAssignmentsOnDataverse(rootDataverseAlias, apiTokenSuperuser);
@@ -88,7 +88,7 @@ public static void setUpClass() {
                 break;
             }
         }
-        assumeTrue(rootDvLackPermissions, properPermissionsSet);
+        assumeTrue(properPermissionsSet, rootDvLackPermissions);
 
     }
 
@@ -389,21 +389,19 @@ public void testCreateAndDeleteDatasetInRoot() {
 
         String persistentId = null;
         Integer datasetId = null;
-        String protocol;
-        String authority;
-        String identifier = null;
 
         Response createDataset = UtilIT.createDatasetViaSwordApi(rootDataverseAlias, datasetTitle, apiTokenContributor);
-        createDataset.prettyPrint();
+        String createResponse = createDataset.prettyPrint();
         createDataset.then().assertThat()
                 .statusCode(CREATED.getStatusCode())
                 .body("entry.treatment", equalTo("no treatment information available"));
 
         persistentId = UtilIT.getDatasetPersistentIdFromSwordResponse(createDataset);
-        GlobalId globalId = new GlobalId(persistentId);
-        protocol = globalId.getProtocol();
-        authority = globalId.getAuthority();
-        identifier = globalId.getIdentifier();
+        // Previously the test parsed the persistentId, but this is now done via PIDProviderBeans.
+        // Instead, verify that it starts with the protocol and that the rest matches what was returned in the createDataset call.
+        assertTrue(persistentId.startsWith("doi:"));
+        String identifier = persistentId.substring(4);
+        assertTrue(createResponse.contains(identifier));
 
         Response listDatasetsAtRoot = UtilIT.listDatasetsViaSword(rootDataverseAlias, apiTokenContributor);
         listDatasetsAtRoot.prettyPrint();
@@ -970,7 +968,7 @@ public void testDeleteFiles() {
 
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         // cleanup, allow custom terms again (delete because it defaults to true)
         UtilIT.deleteSetting(SettingsServiceBean.Key.AllowCustomTermsOfUse);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java b/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java
index 512080ae569..25eec16e17b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/TabularIT.java
@@ -1,33 +1,31 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
-import static edu.harvard.iq.dataverse.api.AccessIT.apiToken;
-import static edu.harvard.iq.dataverse.api.AccessIT.datasetId;
-import static edu.harvard.iq.dataverse.api.AccessIT.tabFile3NameRestricted;
+import io.restassured.RestAssured;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
 import java.io.File;
 import java.util.Arrays;
 import java.util.logging.Logger;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.OK;
 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 public class TabularIT {
 
     private static final Logger logger = Logger.getLogger(TabularIT.class.getCanonicalName());
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testTabularFile() throws InterruptedException {
         Response createUser = UtilIT.createRandomUser();
@@ -63,7 +61,7 @@ public void testTabularFile() throws InterruptedException {
 
         // Give file time to ingest
         
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest , UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + pathToFileThatGoesThroughIngest);
       //  Thread.sleep(10000);
 
         Response fileMetadataNoFormat = UtilIT.getFileMetadata(fileIdAsString, null, apiToken);
@@ -102,7 +100,7 @@ public void testTabularFile() throws InterruptedException {
 
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void test50by1000() {
         // cp scripts/search/data/tabular/50by1000.dta /tmp
@@ -113,7 +111,7 @@ public void test50by1000() {
         assertEquals("NVARS: 50", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata13TinyFile() {
         // cp scripts/search/data/tabular/120745.dta /tmp
@@ -124,7 +122,7 @@ public void testStata13TinyFile() {
         assertEquals("NVARS: 1", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata13Auto() {
         // curl https://www.stata-press.com/data/r13/auto.dta > /tmp/stata13-auto.dta
@@ -135,7 +133,7 @@ public void testStata13Auto() {
         assertEquals("NVARS: 12", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata14OpenSourceAtHarvard() {
         // https://dataverse.harvard.edu/file.xhtml?fileId=3040230 converted to Stata 14: 2017-07-31.tab
@@ -148,7 +146,7 @@ public void testStata14OpenSourceAtHarvard() {
         assertEquals("NVARS: 10", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata14Aggregated() {
         // https://dataverse.harvard.edu/file.xhtml?fileId=3140457 Stata 14: 2018_04_06_Aggregated_dataset_v2.dta
@@ -160,7 +158,7 @@ public void testStata14Aggregated() {
         assertEquals("NVARS: 227", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata14MmPublic() {
         // TODO: This file was downloaded at random. We could keep trying to get it to ingest.
@@ -175,7 +173,7 @@ public void testStata14MmPublic() {
         assertEquals("NVARS: 12", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata15() {
         // for i in `echo {0..33000}`; do echo -n "var$i,"; done > 33k.csv
@@ -187,7 +185,7 @@ public void testStata15() {
         assertEquals("NVARS: 33001", response.body().asString().split("\n")[0]);
     }
 
-    @Ignore
+    @Disabled
     @Test
     public void testStata13Multiple() {
         String fileType = "application/x-stata-13";
@@ -207,7 +205,7 @@ public void testStata13Multiple() {
         }
     }
     
-    @Ignore
+    @Disabled
     @Test
     public void testStata14Multiple() {
         String fileType = "application/x-stata-14";
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java
index ffa432de63b..8d5b6d86cd9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/ThumbnailsIT.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.response.Response;
+import io.restassured.response.Response;
 import org.hamcrest.CoreMatchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class ThumbnailsIT {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 83dfc5fd889..5880b08e5c2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -1,33 +1,33 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import static com.jayway.restassured.RestAssured.given;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import static io.restassured.RestAssured.given;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
-import static javax.ws.rs.core.Response.Status.CREATED;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.Status.OK;
-import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
-import static junit.framework.Assert.assertEquals;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
+import static jakarta.ws.rs.core.Response.Status.OK;
+import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.contains;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class UsersIT {
 
-    @BeforeClass
+    @BeforeAll
     public static void setUp() {
         RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
        /* 
@@ -162,7 +162,7 @@ public void testMergeAccounts(){
         Integer tabFile3IdRestrictedNew = JsonPath.from(tab3AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
         
         //Sleep while dataset locked for ingest
-        assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestrictedNew , UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
+        assertTrue(UtilIT.sleepForLock(datasetIdNew.longValue(), "Ingest", superuserApiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if Ingest Lock exceeds max duration " + tabFile3NameRestrictedNew);
 
         Response restrictResponse = UtilIT.restrictFile(tabFile3IdRestrictedNew.toString(), true, superuserApiToken);
         restrictResponse.prettyPrint();
@@ -404,7 +404,7 @@ public void testAPITokenEndpoints() {
         createDatasetResponse.prettyPrint();
         Integer datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id");
         
-        Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken);
+        Response createPrivateUrl = UtilIT.privateUrlCreate(datasetId, apiToken, false);
         createPrivateUrl.prettyPrint();
         assertEquals(OK.getStatusCode(), createPrivateUrl.getStatusCode());
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index dc9152859ee..e29677c2252 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -1,35 +1,34 @@
 package edu.harvard.iq.dataverse.api;
 
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+
+import java.io.*;
 import java.util.UUID;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import java.io.File;
-import java.io.IOException;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import static jakarta.ws.rs.core.Response.Status.CREATED;
+
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.time.LocalDateTime;
 import java.util.logging.Level;
 import edu.harvard.iq.dataverse.api.datadeposit.SwordConfigurationImpl;
-import com.jayway.restassured.path.xml.XmlPath;
+import io.restassured.path.xml.XmlPath;
+import edu.harvard.iq.dataverse.mydata.MyDataFilterParams;
 import org.apache.commons.lang3.StringUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import com.jayway.restassured.specification.RequestSpecification;
+import io.restassured.specification.RequestSpecification;
 import java.util.List;
 import com.mashape.unirest.http.Unirest;
 import com.mashape.unirest.http.exceptions.UnirestException;
 import com.mashape.unirest.request.GetRequest;
-import java.io.InputStream;
 import edu.harvard.iq.dataverse.util.FileUtil;
 import java.util.Base64;
 import org.apache.commons.io.IOUtils;
@@ -40,19 +39,17 @@
 import org.hamcrest.Description;
 import org.hamcrest.Matcher;
 
-import static com.jayway.restassured.RestAssured.put;
-import static com.jayway.restassured.path.xml.XmlPath.from;
-import static com.jayway.restassured.RestAssured.given;
+import static edu.harvard.iq.dataverse.api.ApiConstants.*;
+import static io.restassured.path.xml.XmlPath.from;
+import static io.restassured.RestAssured.given;
 import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldConstant;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetFieldValue;
 import edu.harvard.iq.dataverse.util.StringUtil;
-import java.io.StringReader;
+
 import java.util.Collections;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class UtilIT {
 
@@ -65,8 +62,9 @@ public class UtilIT {
     private static final String BUILTIN_USER_KEY = "burrito";
     private static final String EMPTY_STRING = "";
     public static final int MAXIMUM_INGEST_LOCK_DURATION = 15;
-    public static final int MAXIMUM_PUBLISH_LOCK_DURATION = 15;
-    
+    public static final int MAXIMUM_PUBLISH_LOCK_DURATION = 20;
+    public static final int MAXIMUM_IMPORT_DURATION = 1;
+
     private static SwordConfigurationImpl swordConfiguration = new SwordConfigurationImpl();
     
     static Matcher<String> equalToCI( String value ) {
@@ -120,6 +118,16 @@ public static Response createRandomUser() {
         return createRandomUser("user");
     }
 
+    /**
+     * A convenience method for creating a random test user, when all you need
+     * is the api token.
+     * @return apiToken
+     */
+    public static String createRandomUserGetToken(){
+        Response createUser = createRandomUser();
+        return getApiTokenFromResponse(createUser);
+    }
+
     public static Response createUser(String username, String email) {
         logger.info("Creating user " + username);
         String userAsJson = getUserAsJsonString(username, username, username, email);
@@ -215,7 +223,19 @@ public static Response validateDataFileHashValue(String fileId,  String apiToken
                 .post("/api/admin/validateDataFileHashValue/" + fileId + "?key=" + apiToken);
         return response;
     }
-    
+
+    public static Response clearThumbnailFailureFlags() {
+        Response response = given()
+                .delete("/api/admin/clearThumbnailFailureFlag");
+        return response;
+    }
+
+    public static Response clearThumbnailFailureFlag(long fileId) {
+        Response response = given()
+                .delete("/api/admin/clearThumbnailFailureFlag/" + fileId);
+        return response;
+    }
+
     private static String getAuthenticatedUserAsJsonString(String persistentUserId, String firstName, String lastName, String authenticationProviderId, String identifier) {
         JsonObjectBuilder builder = Json.createObjectBuilder();
         builder.add("authenticationProviderId", authenticationProviderId);
@@ -290,7 +310,7 @@ static String getAliasFromResponse(Response createDataverseResponse) {
     static Integer getDataverseIdFromResponse(Response createDataverseResponse) {
         JsonPath createdDataverse = JsonPath.from(createDataverseResponse.body().asString());
         int dataverseId = createdDataverse.getInt("data.id");
-        logger.info("Id found in create dataverse response: " + createdDataverse);
+        logger.info("Id found in create dataverse response: " + dataverseId);
         return dataverseId;
     }
 
@@ -370,14 +390,58 @@ static Response createRandomDataverse(String apiToken) {
         return createDataverse(alias, category, apiToken);
     }
 
+    /**
+     * A convenience method for creating a random collection and getting its
+     * alias in one step.
+     * @param apiToken
+     * @return alias
+     */
+    static String createRandomCollectionGetAlias(String apiToken){
+
+        Response createCollectionResponse = createRandomDataverse(apiToken);
+        //createDataverseResponse.prettyPrint();
+        createCollectionResponse.then().assertThat().statusCode(CREATED.getStatusCode());
+        return UtilIT.getAliasFromResponse(createCollectionResponse);
+    }
+
     static Response showDataverseContents(String alias, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .when().get("/api/dataverses/" + alias + "/contents");
     }
 
+    static Response getGuestbookResponses(String dataverseAlias, Long guestbookId, String apiToken) {
+        RequestSpecification requestSpec = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken);
+        if (guestbookId != null) {
+            requestSpec.queryParam("guestbookId", guestbookId);
+        }
+        return requestSpec.get("/api/dataverses/" + dataverseAlias + "/guestbookResponses/");
+    }
+
+    static Response getCollectionSchema(String dataverseAlias, String apiToken) {
+        Response getCollectionSchemaResponse = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .get("/api/dataverses/" + dataverseAlias + "/datasetSchema");
+        return getCollectionSchemaResponse;
+    }
+
+    static Response validateDatasetJson(String dataverseAlias, String datasetJson, String apiToken) {
+        Response getValidateDatasetJsonResponse = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(datasetJson)
+                .contentType("application/json")
+                .post("/api/dataverses/" + dataverseAlias + "/validateDatasetJson");
+        return getValidateDatasetJsonResponse;
+    }
+
     static Response createRandomDatasetViaNativeApi(String dataverseAlias, String apiToken) {
-        String jsonIn = getDatasetJson();
+        return createRandomDatasetViaNativeApi(dataverseAlias, apiToken, false);
+    }
+
+    static Response createRandomDatasetViaNativeApi(String dataverseAlias, String apiToken, boolean withNoLicense) {
+        String jsonIn = getDatasetJson(withNoLicense);
         Response createDatasetResponse = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .body(jsonIn)
@@ -387,7 +451,16 @@ static Response createRandomDatasetViaNativeApi(String dataverseAlias, String ap
     }
 
     private static String getDatasetJson() {
-        File datasetVersionJson = new File("scripts/search/tests/data/dataset-finch1.json");
+        return getDatasetJson(false); 
+    }
+     
+    private static String getDatasetJson(boolean nolicense) {
+        File datasetVersionJson; 
+        if (nolicense) {
+            datasetVersionJson = new File("scripts/search/tests/data/dataset-finch1-nolicense.json");
+        } else {
+            datasetVersionJson = new File("scripts/search/tests/data/dataset-finch1.json");
+        }
         try {
             String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
             return datasetVersionAsJson;
@@ -506,7 +579,7 @@ static Response updateDatasetMetadataViaNative(String persistentId, String pathT
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .body(jsonIn)
                 .contentType("application/json")
-                .put("/api/datasets/:persistentId/versions/:draft?persistentId=" + persistentId);
+                .put("/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "?persistentId=" + persistentId);
         return response;
     }
     
@@ -550,7 +623,14 @@ static Response updateDatasetPIDMetadata(String persistentId,  String apiToken)
                 .post("/api/datasets/:persistentId/modifyRegistrationMetadata/?persistentId=" + persistentId);
         return response;
     }
-    
+
+    /**
+     * Deprecated because once there are new fields in the database that Solr
+     * doesn't know about, dataset creation could be prevented, or at least
+     * subsequent search operations could fail because the dataset can't be
+     * indexed.
+     */
+    @Deprecated    
     static Response loadMetadataBlock(String apiToken, byte[] body) {
         return given()
           .header(API_TOKEN_HTTP_HEADER, apiToken)
@@ -775,7 +855,7 @@ static Response deleteAuxFile(Long fileId, String formatTag, String formatVersio
     static Response getCrawlableFileAccess(String datasetId, String folderName, String apiToken) {
         RequestSpecification requestSpecification = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken);
-        String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=:draft";
+        String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=" + DS_VERSION_DRAFT;
         if (StringUtil.nonEmpty(folderName)) {
             apiPath = apiPath.concat("&folder="+folderName);
         }
@@ -794,7 +874,7 @@ static Response replaceFile(String fileIdOrPersistentId, String pathToFile, Json
     static Response replaceFile(String fileIdOrPersistentId, String pathToFile, String jsonAsString, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -807,11 +887,17 @@ static Response replaceFile(String fileIdOrPersistentId, String pathToFile, Stri
         return requestSpecification
                 .post("/api/files/" + idInPath + "/replace" + optionalQueryParam);
     }
+
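+    /**
+     * Deletes a data file by database id via DELETE /api/files/{id} (native API).
+     */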
+    static Response deleteFileApi(Integer fileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .delete("/api/files/" + fileId);
+    }
     
     static Response updateFileMetadata(String fileIdOrPersistentId, String jsonAsString, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -936,7 +1022,7 @@ static Response downloadFiles(String datasetIdOrPersistentId, String datasetVers
     static Response downloadFiles(String datasetIdOrPersistentId, String datasetVersion, DownloadFormat format, String apiToken) {
         String idInPath = datasetIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(datasetIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(datasetIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + datasetIdOrPersistentId;
         }
@@ -969,7 +1055,7 @@ static Response subset(String fileId, String variables, String apiToken) {
     static Response getFileMetadata(String fileIdOrPersistentId, String optionalFormat, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + fileIdOrPersistentId;
         }
@@ -986,7 +1072,7 @@ static Response getFileMetadata(String fileIdOrPersistentId, String optionalForm
     static Response getFileMetadata(String fileIdOrPersistentId, String optionalFormat) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -1227,6 +1313,12 @@ static Response destroyDataset(Integer datasetId, String apiToken) {
                 .delete("/api/datasets/" + datasetId + "/destroy");
     }
 
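+    /**
+     * Destroys a dataset identified by its persistent identifier via
+     * /api/datasets/:persistentId/destroy.
+     */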
+    static Response destroyDataset(String pid, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .delete("/api/datasets/:persistentId/destroy?persistentId=" + pid);
+    }
+
     static Response deleteFile(Integer fileId, String apiToken) {
         return given()
                 .auth().basic(apiToken, EMPTY_STRING)
@@ -1371,15 +1463,24 @@ static Response nativeGetUsingPersistentId(String persistentId, String apiToken)
     }
 
     static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken) {
+        return getDatasetVersion(persistentId, versionNumber, apiToken, false, false);
+    }
+
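+    /**
+     * skipFiles appends includeFiles=false to the query string; includeDeaccessioned is always
+     * sent as a query parameter.
+     */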
+    static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken, boolean skipFiles, boolean includeDeaccessioned) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
-                .get("/api/datasets/:persistentId/versions/" + versionNumber + "?persistentId=" + persistentId);
+                .queryParam("includeDeaccessioned", includeDeaccessioned)
+                .get("/api/datasets/:persistentId/versions/"
+                        + versionNumber
+                        + "?persistentId="
+                        + persistentId
+                        + (skipFiles ? "&includeFiles=false" : ""));
     }
 
     static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
-                .get("/api/datasets/:persistentId/versions/:latest-published/metadata/citation?persistentId=" + persistentId);
+                .get("/api/datasets/:persistentId/versions/" + DS_VERSION_LATEST_PUBLISHED + "/metadata/citation?persistentId=" + persistentId);
     }
 
     static Response makeSuperUser(String username) {
@@ -1507,7 +1608,7 @@ static Response migrateBuiltinToOAuth(String data, String apiToken) {
     static Response restrictFile(String fileIdOrPersistentId, boolean restrict, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -1521,7 +1622,7 @@ static Response restrictFile(String fileIdOrPersistentId, boolean restrict, Stri
     static Response allowAccessRequests(String datasetIdOrPersistentId, boolean allowRequests, String apiToken) {
         String idInPath = datasetIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(datasetIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(datasetIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + datasetIdOrPersistentId;
         }
@@ -1537,7 +1638,7 @@ static Response requestFileAccess(String fileIdOrPersistentId, String apiToken)
         System.out.print("Request file access, apiToken: " + apiToken);
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -1555,7 +1656,7 @@ static Response requestFileAccess(String fileIdOrPersistentId, String apiToken)
     static Response grantFileAccess(String fileIdOrPersistentId, String identifier, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -1571,7 +1672,7 @@ static Response grantFileAccess(String fileIdOrPersistentId, String identifier,
     static Response getAccessRequestList(String fileIdOrPersistentId, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -1587,7 +1688,7 @@ static Response getAccessRequestList(String fileIdOrPersistentId, String apiToke
     static Response rejectFileAccessRequest(String fileIdOrPersistentId, String identifier, String apiToken) {
         String idInPath = fileIdOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(fileIdOrPersistentId)) {
+        if (!NumberUtils.isCreatable(fileIdOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + fileIdOrPersistentId;
         }
@@ -1619,7 +1720,7 @@ static Response moveDataset(String idOrPersistentIdOfDatasetToMove, String desti
         }
         String idInPath = idOrPersistentIdOfDatasetToMove; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDatasetToMove)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDatasetToMove)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentIdOfDatasetToMove;
         }
@@ -1673,9 +1774,10 @@ static Response privateUrlGet(Integer datasetId, String apiToken) {
         return response;
     }
 
-    static Response privateUrlCreate(Integer datasetId, String apiToken) {
+    static Response privateUrlCreate(Integer datasetId, String apiToken, boolean anonymizedAccess) {
         Response response = given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .queryParam("anonymizedAccess", anonymizedAccess)
                 .post("/api/datasets/" + datasetId + "/privateUrl");
         return response;
     }
@@ -1734,13 +1836,46 @@ static Response removeDatasetThumbnail(String datasetPersistentId, String apiTok
     }
     
     static Response getDatasetVersions(String idOrPersistentId, String apiToken) {
+        return getDatasetVersions(idOrPersistentId, apiToken, false);
+    }
+
+    static Response getDatasetVersions(String idOrPersistentId, String apiToken, boolean skipFiles) {
+        return getDatasetVersions(idOrPersistentId, apiToken, null, null, skipFiles);
+    }
+
+    static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit) {
+        return getDatasetVersions(idOrPersistentId, apiToken, offset, limit, false);
+    }
+
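+    /**
+     * Lists the versions of a dataset; offset and limit are only added to the query string when
+     * non-null, and skipFiles appends includeFiles=false.
+     */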
+    static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit, boolean skipFiles) {
         logger.info("Getting Dataset Versions");
         String idInPath = idOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentId;
         }
+        if (skipFiles) {
+            if ("".equals(optionalQueryParam)) {
+                optionalQueryParam = "?includeFiles=false";
+            } else {
+                optionalQueryParam = optionalQueryParam.concat("&includeFiles=false");
+            }
+        }
+        if (offset != null) {
+            if ("".equals(optionalQueryParam)) {
+                optionalQueryParam = "?offset="+offset;
+            } else {
+                optionalQueryParam = optionalQueryParam.concat("&offset="+offset);
+            }
+        }
+        if (limit != null) {
+            if ("".equals(optionalQueryParam)) {
+                optionalQueryParam = "?limit="+limit;
+            } else {
+                optionalQueryParam = optionalQueryParam.concat("&limit="+limit);
+            }
+        }
         RequestSpecification requestSpecification = given();
         if (apiToken != null) {
             requestSpecification = given()
@@ -1754,7 +1889,7 @@ static Response getProvJson(String idOrPersistentId, String apiToken) {
         logger.info("Getting Provenance JSON");
         String idInPath = idOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentId;
         }
@@ -1770,7 +1905,7 @@ static Response getProvFreeForm(String idOrPersistentId, String apiToken) {
          logger.info("Getting Provenance Free Form");
         String idInPath = idOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentId;
         }
@@ -1786,7 +1921,7 @@ static Response uploadProvJson(String idOrPersistentId, JsonObject jsonObject, S
         logger.info("Uploading Provenance JSON");
         String idInPath = idOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentId;
         }
@@ -1808,7 +1943,7 @@ static Response deleteProvJson(String idOrPersistentId, String apiToken) {
         //TODO: Repeated code, refactor
         String idInPath = idOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentId;
         }
@@ -1824,7 +1959,7 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec
         logger.info("Uploading Provenance Free Form");
         String idInPath = idOrPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentId;
         }
@@ -1845,7 +1980,7 @@ static Response uploadProvFreeForm(String idOrPersistentId, JsonObject jsonObjec
 //        //TODO: Repeated code, refactor
 //        String idInPath = idOrPersistentId; // Assume it's a number.
 //        String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-//        if (!NumberUtils.isNumber(idOrPersistentId)) {
+//        if (!NumberUtils.isCreatable(idOrPersistentId)) {
 //            idInPath = ":persistentId";
 //            optionalQueryParam = "?persistentId=" + idOrPersistentId;
 //        }
@@ -1891,6 +2026,7 @@ static Response exportDataverse(String identifier, String apiToken) {
     }
     
     static Response search(String query, String apiToken, String parameterString) {
+        sleepForDatasetIndex(query, apiToken);
         RequestSpecification requestSpecification = given();
         if (apiToken != null) {
             requestSpecification = given()
@@ -1899,11 +2035,24 @@ static Response search(String query, String apiToken, String parameterString) {
         return requestSpecification.get("/api/search?q=" + query + parameterString);
     }
 
+    private static void sleepForDatasetIndex(String query, String apiToken) {
+        // For id:dataset / id:datafile queries, take the second underscore-delimited token as the
+        // database id and wait briefly for that object to be (re)indexed before searching.
+        if (query.contains("id:dataset") || query.contains("id:datafile")) {
+            String[] parts = query.split("_");
+            if (parts.length >= 2) {
+                boolean ok = UtilIT.sleepForReindex(parts[1], apiToken, 5);
+                if (!ok) {
+                    logger.info("Still indexing after 5 seconds");
+                }
+            }
+        }
+    }
+
     static Response search(String query, String apiToken) {
         return search(query, apiToken, "");
     }
 
     static Response searchAndShowFacets(String query, String apiToken) {
+        sleepForDatasetIndex(query, apiToken);
         RequestSpecification requestSpecification = given();
         if (apiToken != null) {
             requestSpecification = given()
@@ -2071,6 +2220,12 @@ static Response setDataverseLogo(String dataverseAlias, String pathToImageFile,
                 .multiPart("file", new File(pathToImageFile))
                 .put("/api/dataverses/" + dataverseAlias + "/logo");
     }
+    
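+    /**
+     * Updates a single collection attribute via PUT /api/dataverses/{alias}/attribute/{attribute},
+     * passing the new value as a query parameter.
+     */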
+    static Response setCollectionAttribute(String dataverseAlias, String attribute, String value, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .put("/api/dataverses/" + dataverseAlias + "/attribute/" + attribute + "?value=" + value);
+    }
 
     /**
      * Deprecated as the apiToken is not used by the call.
@@ -2125,7 +2280,7 @@ static Response getRsyncScript(String datasetPersistentId, String apiToken) {
     static Response dataCaptureModuleChecksumValidation(String datasetPersistentId, JsonObject jsonObject, String apiToken) {
         String persistentIdInPath = datasetPersistentId; // Assume it's a number.
         String optionalQueryParam = ""; // If datasetPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(datasetPersistentId)) {
+        if (!NumberUtils.isCreatable(datasetPersistentId)) {
             persistentIdInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + datasetPersistentId;
         }
@@ -2172,7 +2327,7 @@ static Response deleteExternalTool(long externalToolid) {
     static Response getExternalToolsForDataset(String idOrPersistentIdOfDataset, String type, String apiToken) {
         String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -2187,7 +2342,7 @@ static Response getExternalToolsForDataset(String idOrPersistentIdOfDataset, Str
     static Response getExternalToolsForFile(String idOrPersistentIdOfFile, String type, String apiToken) {
         String idInPath = idOrPersistentIdOfFile; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfFile)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfFile)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + idOrPersistentIdOfFile;
         }
@@ -2235,6 +2390,56 @@ static Response deleteStorageSite(long storageSiteId) {
                 .delete("/api/admin/storageSites/" + storageSiteId);
     }
 
+    static Response listStorageDrivers(String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/admin/dataverse/storageDrivers");
+    }
+
+    static Response getStorageDriver(String dvAlias, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/admin/dataverse/" + dvAlias + "/storageDriver");
+    }
+
+    static Response setStorageDriver(String dvAlias, String label, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(label)
+                .put("/api/admin/dataverse/" + dvAlias + "/storageDriver");
+    }
+
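+    /**
+     * Requests direct-upload URLs for a file of the given size via
+     * GET /api/datasets/{id}/uploadurls?size=...
+     */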
+    static Response getUploadUrls(String idOrPersistentIdOfDataset, long sizeInBytes, String apiToken) {
+        String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
+        String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
+            idInPath = ":persistentId";
+            optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
+        }
+        RequestSpecification requestSpecification = given();
+        if (apiToken != null) {
+            requestSpecification = given()
+                    .header(API_TOKEN_HTTP_HEADER, apiToken);
+        }
+        return requestSpecification.get("/api/datasets/" + idInPath + "/uploadurls?size=" + sizeInBytes + optionalQueryParam);
+    }
+
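+    /**
+     * Uploads file content directly to the given storage URL, marking the object as temporary
+     * via the x-amz-tagging header.
+     */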
+    static Response uploadFileDirect(String url, InputStream inputStream) {
+        return given()
+                .header("x-amz-tagging", "dv-state=temp")
+                .body(inputStream)
+                .put(url);
+    }
+
+    static Response downloadFileNoRedirect(Integer fileId, String apiToken) {
+        return given().when().redirects().follow(false)
+                .get("/api/access/datafile/" + fileId + "?key=" + apiToken);
+    }
+
+    static Response downloadFromUrl(String url) {
+        return given().get(url);
+    }
+
     static Response metricsDataversesToMonth(String yyyymm, String queryParams) {
         String optionalYyyyMm = "";
         if (yyyymm != null) {
@@ -2363,6 +2568,12 @@ static Response clearMetricCache() {
         RequestSpecification requestSpecification = given();
         return requestSpecification.delete("/api/admin/clearMetricsCache");
     }
+    
+    static Response clearMetricCache(String name) {
+        RequestSpecification requestSpecification = given();
+        return requestSpecification.delete("/api/admin/clearMetricsCache/" + name);
+    }
+
 
     static Response sitemapUpdate() {
         return given()
@@ -2480,10 +2691,14 @@ static boolean sleepForReindex(String idOrPersistentId, String apiToken, int dur
         do {
             timestampResponse = UtilIT.getDatasetTimestamps(idOrPersistentId, apiToken);
             System.out.println(timestampResponse.body().asString());
-            String hasStaleIndex = timestampResponse.body().jsonPath().getString("data.hasStaleIndex");
-            System.out.println(hasStaleIndex);
-            stale = Boolean.parseBoolean(hasStaleIndex);
-            
+            try {
+                String hasStaleIndex = timestampResponse.body().jsonPath().getString("data.hasStaleIndex");
+                System.out.println(hasStaleIndex);
+                stale = Boolean.parseBoolean(hasStaleIndex);
+            } catch (IllegalArgumentException ex) {
+                Logger.getLogger(UtilIT.class.getName()).log(Level.INFO, "no stale index property found", ex);
+                stale = false;
+            }
             try {
                 Thread.sleep(sleepStep);
                 i++;
@@ -2526,10 +2741,25 @@ static boolean sleepForReexport(String idOrPersistentId, String apiToken, int du
         return i <= repeats;
 
     }
-    
-    
-    
-    
+
+    // Modeled after sleepForLock but the dataset isn't locked.
+    // We have to sleep or we can't perform the next operation.
+    static Boolean sleepForDeadlock(int duration) {
+        int i = 0;
+        do {
+            try {
+                Thread.sleep(1000);
+                i++;
+                if (i > duration) {
+                    break;
+                }
+            } catch (InterruptedException ex) {
+                Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex);
+            }
+        } while (true);
+        return i <= duration;
+    }
+
     //Helper function that returns true if a given search returns a non-zero response within a fixed time limit
     // a given duration returns false if still zero results after given duration
     static Boolean sleepForSearch(String searchPart, String apiToken,  String subTree, int duration) {
@@ -2563,7 +2793,7 @@ static Response checkDatasetLocks(long datasetId, String lockType, String apiTok
     static Response checkDatasetLocks(String idOrPersistentId, String lockType, String apiToken) {
         String idInPath = idOrPersistentId; // Assume it's a number.
         String queryParams = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentId)) {
+        if (!NumberUtils.isCreatable(idOrPersistentId)) {
             idInPath = ":persistentId";
             queryParams = "?persistentId=" + idOrPersistentId;
         }
@@ -2628,10 +2858,12 @@ static Response getDatasetTimestamps(String idOrPersistentId, String apiToken) {
             queryParams = "?persistentId=" + idOrPersistentId;
         }
         
-        Response response = given()
-            .header(API_TOKEN_HTTP_HEADER, apiToken)
-            .get("api/datasets/" + idInPath + "/timestamps" + queryParams);
-        return response;
+        RequestSpecification requestSpecification = given();
+        if (apiToken != null) {
+            requestSpecification = given()
+                    .header(UtilIT.API_TOKEN_HTTP_HEADER, apiToken);
+        }
+        return requestSpecification.get("api/datasets/" + idInPath + "/timestamps" + queryParams);
     }
     
     static Response exportOaiSet(String setName) {
@@ -2732,7 +2964,7 @@ static Response makeDataCountGetMetricForDataset(String idOrPersistentIdOfDatase
         System.out.println("metric: " + metric);
         String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -2748,7 +2980,7 @@ static Response makeDataCountGetMetricForDataset(String idOrPersistentIdOfDatase
         System.out.println("metric: " + metric);
         String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -2764,7 +2996,7 @@ static Response makeDataCountGetMetricForDataset(String idOrPersistentIdOfDatase
         System.out.println("metric: " + metric);
         String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -2780,7 +3012,7 @@ static Response makeDataCountAddUsageMetricsFromSushiReport(String idOrPersisten
 
         String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "&persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -2793,7 +3025,7 @@ static Response makeDataCountUpdateCitationsForDataset(String idOrPersistentIdOf
 
         String idInPath = idOrPersistentIdOfDataset; // Assume it's a number.
         String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path.
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -2830,6 +3062,31 @@ static Response findDataverseStorageSize(String dataverseId, String apiToken) {
                 .get("/api/dataverses/" + dataverseId + "/storagesize");
     }
     
+    static Response checkCollectionQuota(String collectionId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/dataverses/" + collectionId + "/storage/quota");
+    }
+    
+    static Response setCollectionQuota(String collectionId, long allocatedSize, String apiToken) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .post("/api/dataverses/" + collectionId + "/storage/quota/" + allocatedSize);
+        return response;
+    }
+    
+    static Response disableCollectionQuota(String collectionId, String apiToken) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .delete("/api/dataverses/" + collectionId + "/storage/quota");
+        return response;
+    }
+    
+    static Response checkCollectionStorageUse(String collectionId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/dataverses/" + collectionId + "/storage/use");
+    }
     
     /**
      * Determine the "payload" storage size of a dataverse
@@ -2846,7 +3103,7 @@ static Response findDatasetStorageSize(String datasetId, String apiToken) {
     
     static Response findDatasetDownloadSize(String datasetId) {
         return given()
-                .get("/api/datasets/" + datasetId + "/versions/:latest/downloadsize");
+                .get("/api/datasets/" + datasetId + "/versions/" + DS_VERSION_LATEST + "/downloadsize");
     }
     
     static Response findDatasetDownloadSize(String datasetId, String version,  String apiToken) {
@@ -3075,7 +3332,7 @@ static Response getDatasetVersionArchivalStatus(Integer datasetId, String versio
     static Response archiveDataset(String idOrPersistentIdOfDataset, String version, String apiToken) {
         String idInPath = idOrPersistentIdOfDataset;
         String optionalQueryParam = "";
-        if (!NumberUtils.isNumber(idOrPersistentIdOfDataset)) {
+        if (!NumberUtils.isCreatable(idOrPersistentIdOfDataset)) {
             idInPath = ":persistentId";
             optionalQueryParam = "?persistentId=" + idOrPersistentIdOfDataset;
         }
@@ -3137,4 +3394,328 @@ static Response importDatasetDDIViaNativeApi(String apiToken, String dataverseAl
 
         return importDDI.post(postString);
     }
+
+    static Response retrieveMyDataAsJsonString(String apiToken, String userIdentifier, ArrayList<Long> roleIds) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json; charset=utf-8")
+                .queryParam("role_ids", roleIds)
+                .queryParam("dvobject_types", MyDataFilterParams.defaultDvObjectTypes)
+                .queryParam("published_states", MyDataFilterParams.defaultPublishedStates)
+                .get("/api/mydata/retrieve?userIdentifier=" + userIdentifier);
+        return response;
+    }
+
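+    /**
+     * Asks /api/admin/requestSignedUrl to sign the given API path for the named user,
+     * with a 35 second timeOut.
+     */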
+    static Response createSignedUrl(String apiToken, String apiPath, String username) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(String.format("{\"url\":\"%s\",\"timeOut\":35,\"user\":\"%s\"}", getRestAssuredBaseUri() + apiPath, username))
+                .contentType("application/json")
+                .post("/api/admin/requestSignedUrl");
+        return response;
+    }
+
+    static String getSignedUrlFromResponse(Response createSignedUrlResponse) {
+        JsonPath jsonPath = JsonPath.from(createSignedUrlResponse.body().asString());
+        String signedUrl = jsonPath.getString("data.signedUrl");
+        return signedUrl;
+    }
+
+    static Response logout() {
+        Response response = given()
+                .contentType("application/json")
+                .post("/api/logout");
+        return response;
+    }
+
+    static Response getDatasetSummaryFieldNames() {
+        Response response = given()
+                .contentType("application/json")
+                .get("/api/datasets/summaryFieldNames");
+        return response;
+    }
+
+    static Response getPrivateUrlDatasetVersion(String privateUrlToken) {
+        Response response = given()
+                .contentType("application/json")
+                .get("/api/datasets/privateUrlDatasetVersion/" + privateUrlToken);
+        return response;
+    }
+
+    static Response getPrivateUrlDatasetVersionCitation(String privateUrlToken) {
+        Response response = given()
+                .contentType("application/json")
+                .get("/api/datasets/privateUrlDatasetVersion/" + privateUrlToken + "/citation");
+        return response;
+    }
+
+    static Response getDatasetVersionCitation(Integer datasetId, String version, boolean includeDeaccessioned, String apiToken) {
+        Response response = given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .queryParam("includeDeaccessioned", includeDeaccessioned)
+                .get("/api/datasets/" + datasetId + "/versions/" + version + "/citation");
+        return response;
+    }
+
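+    /**
+     * Gets the files of a dataset version; null filter parameters are omitted from the request,
+     * while includeDeaccessioned is always sent.
+     */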
+    static Response getVersionFiles(Integer datasetId,
+                                    String version,
+                                    Integer limit,
+                                    Integer offset,
+                                    String contentType,
+                                    String accessStatus,
+                                    String categoryName,
+                                    String tabularTagName,
+                                    String searchText,
+                                    String orderCriteria,
+                                    boolean includeDeaccessioned,
+                                    String apiToken) {
+        RequestSpecification requestSpecification = given()
+                .contentType("application/json")
+                .queryParam("includeDeaccessioned", includeDeaccessioned);
+        if (apiToken != null) {
+            requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken);
+        }
+        if (limit != null) {
+            requestSpecification = requestSpecification.queryParam("limit", limit);
+        }
+        if (offset != null) {
+            requestSpecification = requestSpecification.queryParam("offset", offset);
+        }
+        if (contentType != null) {
+            requestSpecification = requestSpecification.queryParam("contentType", contentType);
+        }
+        if (accessStatus != null) {
+            requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus);
+        }
+        if (categoryName != null) {
+            requestSpecification = requestSpecification.queryParam("categoryName", categoryName);
+        }
+        if (tabularTagName != null) {
+            requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName);
+        }
+        if (searchText != null) {
+            requestSpecification = requestSpecification.queryParam("searchText", searchText);
+        }
+        if (orderCriteria != null) {
+            requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria);
+        }
+        return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files");
+    }
+
+    static Response createAndUploadTestFile(String persistentId, String testFileName, byte[] testFileContentInBytes, String apiToken) throws IOException {
+        Path pathToTempDir = Files.createTempDirectory(null);
+        String pathToTestFile = pathToTempDir + File.separator + testFileName;
+        File testFile = new File(pathToTestFile);
+        try (FileOutputStream fileOutputStream = new FileOutputStream(testFile)) {
+            fileOutputStream.write(testFileContentInBytes);
+            fileOutputStream.flush();
+        }
+
+        return uploadZipFileViaSword(persistentId, pathToTestFile, apiToken);
+    }
+
+    static Response getFileDownloadCount(String dataFileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/files/" + dataFileId + "/downloadCount");
+    }
+
+    static Response getFileDataTables(String dataFileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/files/" + dataFileId + "/dataTables");
+    }
+
+    static Response getUserFileAccessRequested(String dataFileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested");
+    }
+
+    static Response getUserPermissionsOnFile(String dataFileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/access/datafile/" + dataFileId + "/userPermissions");
+    }
+
+    static Response getUserPermissionsOnDataset(String datasetId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/datasets/" + datasetId + "/userPermissions");
+    }
+
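+    /**
+     * Embargoes the given file of a dataset by posting dateAvailable, a fixed test reason, and
+     * the file id to the :set-embargo action.
+     */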
+    static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) {
+        JsonObjectBuilder jsonBuilder = Json.createObjectBuilder();
+        jsonBuilder.add("dateAvailable", dateAvailable);
+        jsonBuilder.add("reason", "This is a test embargo");
+        jsonBuilder.add("fileIds", Json.createArrayBuilder().add(fileId));
+        String jsonString = jsonBuilder.build().toString();
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(jsonString)
+                .contentType("application/json")
+                .urlEncodingEnabled(false)
+                .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo");
+    }
+
+    static Response getVersionFileCounts(Integer datasetId,
+                                         String version,
+                                         String contentType,
+                                         String accessStatus,
+                                         String categoryName,
+                                         String tabularTagName,
+                                         String searchText,
+                                         boolean includeDeaccessioned,
+                                         String apiToken) {
+        RequestSpecification requestSpecification = given()
+                .queryParam("includeDeaccessioned", includeDeaccessioned);
+        if (apiToken != null) {
+            requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken);
+        }
+        if (contentType != null) {
+            requestSpecification = requestSpecification.queryParam("contentType", contentType);
+        }
+        if (accessStatus != null) {
+            requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus);
+        }
+        if (categoryName != null) {
+            requestSpecification = requestSpecification.queryParam("categoryName", categoryName);
+        }
+        if (tabularTagName != null) {
+            requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName);
+        }
+        if (searchText != null) {
+            requestSpecification = requestSpecification.queryParam("searchText", searchText);
+        }
+        return requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts");
+    }
+
+    static Response setFileCategories(String dataFileId, String apiToken, List<String> categories) {
+        JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
+        for (String category : categories) {
+            jsonArrayBuilder.add(category);
+        }
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("categories", jsonArrayBuilder);
+        String jsonString = jsonObjectBuilder.build().toString();
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(jsonString)
+                .post("/api/files/" + dataFileId + "/metadata/categories");
+    }
+
+    static Response setFileTabularTags(String dataFileId, String apiToken, List<String> tabularTags) {
+        JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder();
+        for (String tabularTag : tabularTags) {
+            jsonArrayBuilder.add(tabularTag);
+        }
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("tabularTags", jsonArrayBuilder);
+        String jsonString = jsonObjectBuilder.build().toString();
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(jsonString)
+                .post("/api/files/" + dataFileId + "/metadata/tabularTags");
+    }
+
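+    // Note: same DELETE /api/files/{id} endpoint as deleteFileApi above.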
+    static Response deleteFileInDataset(Integer fileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .delete("/api/files/" + fileId);
+    }
+
+    static Response getHasBeenDeleted(String dataFileId, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/files/" + dataFileId + "/hasBeenDeleted");
+    }
+
+    static Response deaccessionDataset(Integer datasetId, String version, String deaccessionReason, String deaccessionForwardURL, String apiToken) {
+        JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+        jsonObjectBuilder.add("deaccessionReason", deaccessionReason);
+        if (deaccessionForwardURL != null) {
+            jsonObjectBuilder.add("deaccessionForwardURL", deaccessionForwardURL);
+        }
+        String jsonString = jsonObjectBuilder.build().toString();
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(jsonString)
+                .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession");
+    }
+
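+    /**
+     * Gets the download size of a dataset version; optional filter parameters are only added
+     * when non-null, while mode and includeDeaccessioned are always sent.
+     */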
+    static Response getDownloadSize(Integer datasetId,
+                                    String version,
+                                    String contentType,
+                                    String accessStatus,
+                                    String categoryName,
+                                    String tabularTagName,
+                                    String searchText,
+                                    String mode,
+                                    boolean includeDeaccessioned,
+                                    String apiToken) {
+        RequestSpecification requestSpecification = given()
+                .queryParam("includeDeaccessioned", includeDeaccessioned)
+                .queryParam("mode", mode);
+        if (apiToken != null) {
+            requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken);
+        }
+        if (contentType != null) {
+            requestSpecification = requestSpecification.queryParam("contentType", contentType);
+        }
+        if (accessStatus != null) {
+            requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus);
+        }
+        if (categoryName != null) {
+            requestSpecification = requestSpecification.queryParam("categoryName", categoryName);
+        }
+        if (tabularTagName != null) {
+            requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName);
+        }
+        if (searchText != null) {
+            requestSpecification = requestSpecification.queryParam("searchText", searchText);
+        }
+        return requestSpecification
+                .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize");
+    }
+
+    static Response downloadTmpFile(String fullyQualifiedPathToFile, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .get("/api/admin/downloadTmpFile?fullyQualifiedPathToFile=" + fullyQualifiedPathToFile);
+    }
+
+    static Response setDatasetStorageDriver(Integer datasetId, String driverLabel, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(driverLabel)
+                .put("/api/datasets/" + datasetId + "/storageDriver");
+    }
+    
+    
+    //Globus Store related - not currently used
+    
+    static Response getDatasetGlobusUploadParameters(Integer datasetId, String locale, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .get("/api/datasets/" + datasetId + "/globusUploadParameters?locale=" + locale);
+    }
+    
+    static Response getDatasetGlobusDownloadParameters(Integer datasetId, String locale, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .contentType("application/json")
+                .get("/api/datasets/" + datasetId + "/globusDownloadParameters?locale=" + locale);
+    }
+    
+    static Response requestGlobusDownload(Integer datasetId, JsonObject body, String apiToken) {
+        return given()
+                .header(API_TOKEN_HTTP_HEADER, apiToken)
+                .body(body)
+                .contentType("application/json")
+                .post("/api/datasets/" + datasetId + "/requestGlobusDownload");
+    }
+
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java
new file mode 100644
index 00000000000..486697664e6
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/ApiKeyAuthMechanismTest.java
@@ -0,0 +1,130 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.api.auth.doubles.ApiKeyContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.ACCESS_DATAFILE_PATH_PREFIX;
+import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.RESPONSE_MESSAGE_BAD_API_KEY;
+import static org.junit.jupiter.api.Assertions.*;
+
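+/**
+ * Unit tests for ApiKeyAuthMechanism.findUserFromRequest, covering the no-key, private URL user,
+ * authenticated user, and bad-key paths.
+ */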
+public class ApiKeyAuthMechanismTest {
+
+    private static final String TEST_API_KEY = "test-api-key";
+    private static final String TEST_PATH = "/test/path/";
+
+    private ApiKeyAuthMechanism sut;
+
+    private final PrivateUrlUser testAnonymizedPrivateUrlUser = new PrivateUrlUser(1L, true);
+
+    @BeforeEach
+    public void setUp() {
+        sut = new ApiKeyAuthMechanism();
+    }
+
+    @Test
+    public void testFindUserFromRequest_ApiKeyNotProvided() throws WrappedAuthErrorResponse {
+        sut.privateUrlSvc = Mockito.mock(PrivateUrlServiceBean.class);
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+        sut.userSvc = Mockito.mock(UserServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(null, TEST_PATH);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertNull(actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_ApiKeyProvided_NotAnonymizedPrivateUrlUserAuthenticated() throws WrappedAuthErrorResponse {
+        PrivateUrlServiceBean privateUrlServiceStub = Mockito.mock(PrivateUrlServiceBean.class);
+        PrivateUrlUser testPrivateUrlUser = new PrivateUrlUser(1L);
+        Mockito.when(privateUrlServiceStub.getPrivateUrlUserFromToken(TEST_API_KEY)).thenReturn(testPrivateUrlUser);
+        sut.privateUrlSvc = privateUrlServiceStub;
+
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+        sut.userSvc = Mockito.mock(UserServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, TEST_PATH);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertEquals(testPrivateUrlUser, actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_ApiKeyProvided_AnonymizedPrivateUrlUserAuthenticated_AccessingAccessDatafilePath() throws WrappedAuthErrorResponse {
+        PrivateUrlServiceBean privateUrlServiceStub = Mockito.mock(PrivateUrlServiceBean.class);
+        Mockito.when(privateUrlServiceStub.getPrivateUrlUserFromToken(TEST_API_KEY)).thenReturn(testAnonymizedPrivateUrlUser);
+        sut.privateUrlSvc = privateUrlServiceStub;
+
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+        sut.userSvc = Mockito.mock(UserServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, ACCESS_DATAFILE_PATH_PREFIX);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertEquals(testAnonymizedPrivateUrlUser, actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_ApiKeyProvided_AnonymizedPrivateUrlUserAuthenticated_NotAccessingAccessDatafilePath() {
+        PrivateUrlServiceBean privateUrlServiceStub = Mockito.mock(PrivateUrlServiceBean.class);
+        Mockito.when(privateUrlServiceStub.getPrivateUrlUserFromToken(TEST_API_KEY)).thenReturn(testAnonymizedPrivateUrlUser);
+        sut.privateUrlSvc = privateUrlServiceStub;
+
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+        sut.userSvc = Mockito.mock(UserServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, TEST_PATH);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        assertEquals(RESPONSE_MESSAGE_BAD_API_KEY, wrappedAuthErrorResponse.getMessage());
+    }
+
+    @Test
+    public void testFindUserFromRequest_ApiKeyProvided_AuthenticatedUser() throws WrappedAuthErrorResponse {
+        PrivateUrlServiceBean privateUrlServiceStub = Mockito.mock(PrivateUrlServiceBean.class);
+        Mockito.when(privateUrlServiceStub.getPrivateUrlUserFromToken(TEST_API_KEY)).thenReturn(null);
+        sut.privateUrlSvc = privateUrlServiceStub;
+
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser();
+        Mockito.when(authenticationServiceBeanStub.lookupUser(TEST_API_KEY)).thenReturn(testAuthenticatedUser);
+        sut.authSvc = authenticationServiceBeanStub;
+
+        UserServiceBean userServiceBeanStub = Mockito.mock(UserServiceBean.class);
+        Mockito.when(userServiceBeanStub.updateLastApiUseTime(testAuthenticatedUser)).thenReturn(testAuthenticatedUser);
+        sut.userSvc = userServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, TEST_PATH);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertEquals(testAuthenticatedUser, actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_ApiKeyProvided_CanNotAuthenticateUserWithAnyMethod() {
+        PrivateUrlServiceBean privateUrlServiceStub = Mockito.mock(PrivateUrlServiceBean.class);
+        Mockito.when(privateUrlServiceStub.getPrivateUrlUserFromToken(TEST_API_KEY)).thenReturn(null);
+        sut.privateUrlSvc = privateUrlServiceStub;
+
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        Mockito.when(authenticationServiceBeanStub.lookupUser(TEST_API_KEY)).thenReturn(null);
+        sut.authSvc = authenticationServiceBeanStub;
+
+        sut.userSvc = Mockito.mock(UserServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new ApiKeyContainerRequestTestFake(TEST_API_KEY, TEST_PATH);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        assertEquals(RESPONSE_MESSAGE_BAD_API_KEY, wrappedAuthErrorResponse.getMessage());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java
new file mode 100644
index 00000000000..7e1c23d26f4
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/BearerTokenAuthMechanismTest.java
@@ -0,0 +1,167 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import com.nimbusds.oauth2.sdk.ParseException;
+import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
+import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthProvider;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Optional;
+
+import static edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism.*;
+import static org.junit.jupiter.api.Assertions.*;
+
+@LocalJvmSettings
+@JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-bearer-auth")
+class BearerTokenAuthMechanismTest {
+
+    private static final String TEST_API_KEY = "test-api-key";
+
+    private BearerTokenAuthMechanism sut;
+
+    @BeforeEach
+    public void setUp() {
+        sut = new BearerTokenAuthMechanism();
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+        sut.userSvc = Mockito.mock(UserServiceBean.class);
+    }
+
+    @Test
+    void testFindUserFromRequest_no_token() throws WrappedAuthErrorResponse {
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake(null);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertNull(actual);
+    }
+
+    @Test
+    void testFindUserFromRequest_invalid_token() {
+        Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet());
+        
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer ");
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        // then
+        assertEquals(INVALID_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage());
+    }
+    @Test
+    void testFindUserFromRequest_no_OidcProvider() {
+        Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.emptySet());
+        
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        // then
+        assertEquals(BEARER_TOKEN_DETECTED_NO_OIDC_PROVIDER_CONFIGURED, wrappedAuthErrorResponse.getMessage());
+    }
+
+    @Test
+    void testFindUserFromRequest_oneProvider_invalidToken_1() throws ParseException, IOException {
+        OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class);
+        String providerID = "OIDC";
+        Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID);
+        // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean
+        Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID));
+        Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider);
+
+        // ensure that the OIDCAuthProvider cannot resolve the token to a UserRecordIdentifier (empty Optional)
+        BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY);
+        Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.empty());
+
+        // when
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        // then
+        assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage());
+    }
+
+    @Test
+    void testFindUserFromRequest_oneProvider_invalidToken_2() throws ParseException, IOException {
+        OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class);
+        String providerID = "OIDC";
+        Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID);
+        // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean
+        Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID));
+        Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider);
+
+        // ensure that the OIDCAuthProvider throws an IOException while looking up the token
+        BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY);
+        Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenThrow(IOException.class);
+
+        // when
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        // then
+        assertEquals(UNAUTHORIZED_BEARER_TOKEN, wrappedAuthErrorResponse.getMessage());
+    }
+    @Test
+    void testFindUserFromRequest_oneProvider_validToken() throws WrappedAuthErrorResponse, ParseException, IOException {
+        OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class);
+        String providerID = "OIDC";
+        Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID);
+        // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean
+        Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID));
+        Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider);
+
+        // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token
+        UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY");
+        BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY);
+        Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo));
+
+        // ensure that the AuthenticationServiceBean can retrieve an AuthenticatedUser for the UserRecordIdentifier
+        AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser();
+        Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(testAuthenticatedUser);
+        Mockito.when(sut.userSvc.updateLastApiUseTime(testAuthenticatedUser)).thenReturn(testAuthenticatedUser);
+
+        // when
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        // then
+        assertEquals(testAuthenticatedUser, actual);
+        Mockito.verify(sut.userSvc, Mockito.atLeastOnce()).updateLastApiUseTime(testAuthenticatedUser);
+
+    }
+    @Test
+    void testFindUserFromRequest_oneProvider_validToken_noAccount() throws WrappedAuthErrorResponse, ParseException, IOException {
+        OIDCAuthProvider oidcAuthProvider = Mockito.mock(OIDCAuthProvider.class);
+        String providerID = "OIDC";
+        Mockito.when(oidcAuthProvider.getId()).thenReturn(providerID);
+        // ensure that a valid OIDCAuthProvider is available within the AuthenticationServiceBean
+        Mockito.when(sut.authSvc.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Collections.singleton(providerID));
+        Mockito.when(sut.authSvc.getAuthenticationProvider(providerID)).thenReturn(oidcAuthProvider);
+
+        // ensure that the OIDCAuthProvider returns a valid UserRecordIdentifier for a given Token
+        UserRecordIdentifier userinfo = new UserRecordIdentifier(providerID, "KEY");
+        BearerAccessToken token = BearerAccessToken.parse("Bearer " + TEST_API_KEY);
+        Mockito.when(oidcAuthProvider.getUserIdentifier(token)).thenReturn(Optional.of(userinfo));
+
+        // ensure that the AuthenticationServiceBean finds no AuthenticatedUser for the UserRecordIdentifier (no local account)
+        Mockito.when(sut.authSvc.lookupUser(userinfo)).thenReturn(null);
+
+
+        // when
+        ContainerRequestContext testContainerRequest = new BearerTokenKeyContainerRequestTestFake("Bearer " + TEST_API_KEY);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        // then
+        assertNull(actual);
+
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java
new file mode 100644
index 00000000000..b3435d53ca2
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/CompoundAuthMechanismTest.java
@@ -0,0 +1,49 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.api.auth.doubles.ContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+
+public class CompoundAuthMechanismTest {
+
+    @Test
+    public void testFindUserFromRequest_CanNotAuthenticateUserWithAnyMechanism() throws WrappedAuthErrorResponse {
+        AuthMechanism authMechanismStub1 = Mockito.mock(AuthMechanism.class);
+        Mockito.when(authMechanismStub1.findUserFromRequest(any(ContainerRequestContext.class))).thenReturn(null);
+
+        AuthMechanism authMechanismStub2 = Mockito.mock(AuthMechanism.class);
+        Mockito.when(authMechanismStub2.findUserFromRequest(any(ContainerRequestContext.class))).thenReturn(null);
+
+        CompoundAuthMechanism sut = new CompoundAuthMechanism(authMechanismStub1, authMechanismStub2);
+
+        User actual = sut.findUserFromRequest(new ContainerRequestTestFake());
+
+        assertThat(actual, equalTo(GuestUser.get()));
+    }
+
+    @Test
+    public void testFindUserFromRequest_UserAuthenticated() throws WrappedAuthErrorResponse {
+        AuthMechanism authMechanismStub1 = Mockito.mock(AuthMechanism.class);
+        AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser();
+        Mockito.when(authMechanismStub1.findUserFromRequest(any(ContainerRequestContext.class))).thenReturn(testAuthenticatedUser);
+
+        AuthMechanism authMechanismStub2 = Mockito.mock(AuthMechanism.class);
+        Mockito.when(authMechanismStub2.findUserFromRequest(any(ContainerRequestContext.class))).thenReturn(null);
+
+        CompoundAuthMechanism sut = new CompoundAuthMechanism(authMechanismStub1, authMechanismStub2);
+
+        User actual = sut.findUserFromRequest(new ContainerRequestTestFake());
+
+        assertEquals(testAuthenticatedUser, actual);
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java
new file mode 100644
index 00000000000..74a7d239c05
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SessionCookieAuthMechanismTest.java
@@ -0,0 +1,49 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.DataverseSession;
+import edu.harvard.iq.dataverse.api.auth.doubles.ContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+
+@LocalJvmSettings
+class SessionCookieAuthMechanismTest {
+
+    private SessionCookieAuthMechanism sut;
+
+    @BeforeEach
+    public void setUp() {
+        sut = new SessionCookieAuthMechanism();
+    }
+
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "false", varArgs = "api-session-auth")
+    void testFindUserFromRequest_FeatureFlagDisabled() throws WrappedAuthErrorResponse {
+        sut.session = Mockito.mock(DataverseSession.class);
+
+        User actual = sut.findUserFromRequest(new ContainerRequestTestFake());
+
+        assertNull(actual);
+    }
+
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-session-auth")
+    void testFindUserFromRequest_FeatureFlagEnabled_UserAuthenticated() throws WrappedAuthErrorResponse {
+        DataverseSession dataverseSessionStub = Mockito.mock(DataverseSession.class);
+        User testAuthenticatedUser = new AuthenticatedUser();
+        Mockito.when(dataverseSessionStub.getUser()).thenReturn(testAuthenticatedUser);
+        sut.session = dataverseSessionStub;
+
+        User actual = sut.findUserFromRequest(new ContainerRequestTestFake());
+
+        assertEquals(testAuthenticatedUser, actual);
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java
new file mode 100644
index 00000000000..74db6e544da
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/SignedUrlAuthMechanismTest.java
@@ -0,0 +1,99 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.api.auth.doubles.SignedUrlContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.ApiToken;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+import static edu.harvard.iq.dataverse.api.auth.SignedUrlAuthMechanism.RESPONSE_MESSAGE_BAD_SIGNED_URL;
+import static org.junit.jupiter.api.Assertions.*;
+
+public class SignedUrlAuthMechanismTest {
+
+    private static final String TEST_SIGNED_URL_TOKEN = "test-signed-url-token";
+    private static final String TEST_SIGNED_URL_USER_ID = "test-user";
+
+    private SignedUrlAuthMechanism sut;
+
+    private final AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser();
+
+    @BeforeEach
+    public void setUp() {
+        sut = new SignedUrlAuthMechanism();
+    }
+
+    @Test
+    public void testFindUserFromRequest_SignedUrlTokenNotProvided() throws WrappedAuthErrorResponse {
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(null, null);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertNull(actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_SignedUrlTokenProvided_UserExists_ValidSignedUrl_UserAuthenticated() throws WrappedAuthErrorResponse {
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        Mockito.when(authenticationServiceBeanStub.getAuthenticatedUser(TEST_SIGNED_URL_USER_ID)).thenReturn(testAuthenticatedUser);
+        ApiToken apiTokenStub = Mockito.mock(ApiToken.class);
+        Mockito.when(apiTokenStub.getTokenString()).thenReturn(TEST_SIGNED_URL_TOKEN);
+        Mockito.when(authenticationServiceBeanStub.findApiTokenByUser(testAuthenticatedUser)).thenReturn(apiTokenStub);
+
+        sut.authSvc = authenticationServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertEquals(testAuthenticatedUser, actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_SignedUrlTokenProvided_UserExists_InvalidSignedUrl_UserNotAuthenticated() {
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        Mockito.when(authenticationServiceBeanStub.getAuthenticatedUser(TEST_SIGNED_URL_USER_ID)).thenReturn(testAuthenticatedUser);
+        ApiToken apiTokenStub = Mockito.mock(ApiToken.class);
+        Mockito.when(apiTokenStub.getTokenString()).thenReturn("different-token-from-the-signed-url");
+        Mockito.when(authenticationServiceBeanStub.findApiTokenByUser(testAuthenticatedUser)).thenReturn(apiTokenStub);
+
+        sut.authSvc = authenticationServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedAuthErrorResponse.getMessage());
+    }
+
+    @Test
+    public void testFindUserFromRequest_SignedUrlTokenProvided_UserExists_UserApiTokenDoesNotExist_UserNotAuthenticated() {
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        Mockito.when(authenticationServiceBeanStub.getAuthenticatedUser(TEST_SIGNED_URL_USER_ID)).thenReturn(testAuthenticatedUser);
+        Mockito.when(authenticationServiceBeanStub.findApiTokenByUser(testAuthenticatedUser)).thenReturn(null);
+
+        sut.authSvc = authenticationServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedAuthErrorResponse.getMessage());
+    }
+
+    @Test
+    public void testFindUserFromRequest_SignedUrlTokenProvided_UserDoesNotExistForTheGivenId_UserNotAuthenticated() {
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        Mockito.when(authenticationServiceBeanStub.getAuthenticatedUser(TEST_SIGNED_URL_USER_ID)).thenReturn(null);
+
+        sut.authSvc = authenticationServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new SignedUrlContainerRequestTestFake(TEST_SIGNED_URL_TOKEN, TEST_SIGNED_URL_USER_ID);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        assertEquals(RESPONSE_MESSAGE_BAD_SIGNED_URL, wrappedAuthErrorResponse.getMessage());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java
new file mode 100644
index 00000000000..3f90fa73fa9
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/WorkflowKeyAuthMechanismTest.java
@@ -0,0 +1,61 @@
+package edu.harvard.iq.dataverse.api.auth;
+
+import edu.harvard.iq.dataverse.api.auth.doubles.WorkflowKeyContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+
+import static edu.harvard.iq.dataverse.api.auth.WorkflowKeyAuthMechanism.RESPONSE_MESSAGE_BAD_WORKFLOW_KEY;
+import static org.junit.jupiter.api.Assertions.*;
+
+public class WorkflowKeyAuthMechanismTest {
+
+    private static final String TEST_WORKFLOW_KEY = "test-workflow-key";
+
+    private WorkflowKeyAuthMechanism sut;
+
+    @BeforeEach
+    public void setUp() {
+        sut = new WorkflowKeyAuthMechanism();
+    }
+
+    @Test
+    public void testFindUserFromRequest_WorkflowKeyNotProvided() throws WrappedAuthErrorResponse {
+        sut.authSvc = Mockito.mock(AuthenticationServiceBean.class);
+
+        ContainerRequestContext testContainerRequest = new WorkflowKeyContainerRequestTestFake(null);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertNull(actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_WorkflowKeyProvided_UserAuthenticated() throws WrappedAuthErrorResponse {
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        AuthenticatedUser testAuthenticatedUser = new AuthenticatedUser();
+        Mockito.when(authenticationServiceBeanStub.lookupUserForWorkflowInvocationID(TEST_WORKFLOW_KEY)).thenReturn(testAuthenticatedUser);
+        sut.authSvc = authenticationServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new WorkflowKeyContainerRequestTestFake(TEST_WORKFLOW_KEY);
+        User actual = sut.findUserFromRequest(testContainerRequest);
+
+        assertEquals(testAuthenticatedUser, actual);
+    }
+
+    @Test
+    public void testFindUserFromRequest_WorkflowKeyProvided_UserNotAuthenticated() {
+        AuthenticationServiceBean authenticationServiceBeanStub = Mockito.mock(AuthenticationServiceBean.class);
+        Mockito.when(authenticationServiceBeanStub.lookupUserForWorkflowInvocationID(TEST_WORKFLOW_KEY)).thenReturn(null);
+        sut.authSvc = authenticationServiceBeanStub;
+
+        ContainerRequestContext testContainerRequest = new WorkflowKeyContainerRequestTestFake(TEST_WORKFLOW_KEY);
+        WrappedAuthErrorResponse wrappedAuthErrorResponse = assertThrows(WrappedAuthErrorResponse.class, () -> sut.findUserFromRequest(testContainerRequest));
+
+        assertEquals(RESPONSE_MESSAGE_BAD_WORKFLOW_KEY, wrappedAuthErrorResponse.getMessage());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java
new file mode 100644
index 00000000000..3afa1a06be3
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyContainerRequestTestFake.java
@@ -0,0 +1,29 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.UriInfo;
+
+import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.DATAVERSE_API_KEY_REQUEST_HEADER_NAME;
+
+public class ApiKeyContainerRequestTestFake extends ContainerRequestTestFake {
+
+    private final String apiKey;
+    private final UriInfo uriInfo;
+
+    public ApiKeyContainerRequestTestFake(String apiKey, String path) {
+        this.apiKey = apiKey;
+        this.uriInfo = new ApiKeyUriInfoTestFake(apiKey, path);
+    }
+
+    @Override
+    public UriInfo getUriInfo() {
+        return uriInfo;
+    }
+
+    @Override
+    public String getHeaderString(String s) {
+        if (s.equals(DATAVERSE_API_KEY_REQUEST_HEADER_NAME)) {
+            return this.apiKey;
+        }
+        return null;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java
new file mode 100644
index 00000000000..495d3810adb
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ApiKeyUriInfoTestFake.java
@@ -0,0 +1,29 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
+
+import static edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism.DATAVERSE_API_KEY_REQUEST_PARAM_NAME;
+
+public class ApiKeyUriInfoTestFake extends UriInfoTestFake {
+
+    private final String apiKey;
+    private final String path;
+
+    public ApiKeyUriInfoTestFake(String apiKey, String path) {
+        this.apiKey = apiKey;
+        this.path = path;
+    }
+
+    @Override
+    public String getPath() {
+        return path;
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getQueryParameters() {
+        MultivaluedMap<String, String> queryParameters = new MultivaluedHashMap<>();
+        queryParameters.add(DATAVERSE_API_KEY_REQUEST_PARAM_NAME, apiKey);
+        return queryParameters;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java
new file mode 100644
index 00000000000..04a66e851c5
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/BearerTokenKeyContainerRequestTestFake.java
@@ -0,0 +1,20 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.HttpHeaders;
+
+public class BearerTokenKeyContainerRequestTestFake extends ContainerRequestTestFake {
+
+    private final String apiKey;
+
+    public BearerTokenKeyContainerRequestTestFake(String apiKey) {
+        this.apiKey = apiKey;
+    }
+
+    @Override
+    public String getHeaderString(String s) {
+        if (s.equals(HttpHeaders.AUTHORIZATION)) {
+            return this.apiKey;
+        }
+        return null;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java
new file mode 100644
index 00000000000..74f2e9dbb41
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/ContainerRequestTestFake.java
@@ -0,0 +1,135 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.container.ContainerRequestContext;
+import jakarta.ws.rs.core.*;
+import java.io.InputStream;
+import java.net.URI;
+import java.util.*;
+
+public class ContainerRequestTestFake implements ContainerRequestContext {
+
+    @Override
+    public Object getProperty(String s) {
+        return null;
+    }
+
+    @Override
+    public Collection<String> getPropertyNames() {
+        return null;
+    }
+
+    @Override
+    public void setProperty(String s, Object o) {
+
+    }
+
+    @Override
+    public void removeProperty(String s) {
+
+    }
+
+    @Override
+    public UriInfo getUriInfo() {
+        return null;
+    }
+
+    @Override
+    public void setRequestUri(URI uri) {
+
+    }
+
+    @Override
+    public void setRequestUri(URI uri, URI uri1) {
+
+    }
+
+    @Override
+    public Request getRequest() {
+        return null;
+    }
+
+    @Override
+    public String getMethod() {
+        return null;
+    }
+
+    @Override
+    public void setMethod(String s) {
+
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getHeaders() {
+        return null;
+    }
+
+    @Override
+    public String getHeaderString(String s) {
+        return null;
+    }
+
+    @Override
+    public Date getDate() {
+        return null;
+    }
+
+    @Override
+    public Locale getLanguage() {
+        return null;
+    }
+
+    @Override
+    public int getLength() {
+        return 0;
+    }
+
+    @Override
+    public MediaType getMediaType() {
+        return null;
+    }
+
+    @Override
+    public List<MediaType> getAcceptableMediaTypes() {
+        return null;
+    }
+
+    @Override
+    public List<Locale> getAcceptableLanguages() {
+        return null;
+    }
+
+    @Override
+    public Map<String, Cookie> getCookies() {
+        return null;
+    }
+
+    @Override
+    public boolean hasEntity() {
+        return false;
+    }
+
+    @Override
+    public InputStream getEntityStream() {
+        return null;
+    }
+
+    @Override
+    public void setEntityStream(InputStream inputStream) {
+
+    }
+
+    @Override
+    public SecurityContext getSecurityContext() {
+        return null;
+    }
+
+    @Override
+    public void setSecurityContext(SecurityContext securityContext) {
+
+    }
+
+    @Override
+    public void abortWith(Response response) {
+
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java
new file mode 100644
index 00000000000..df37f6723d3
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlContainerRequestTestFake.java
@@ -0,0 +1,16 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.UriInfo;
+
+public class SignedUrlContainerRequestTestFake extends ContainerRequestTestFake {
+    private final UriInfo uriInfo;
+
+    public SignedUrlContainerRequestTestFake(String signedUrlToken, String signedUrlUserId) {
+        this.uriInfo = new SignedUrlUriInfoTestFake(signedUrlToken, signedUrlUserId);
+    }
+
+    @Override
+    public UriInfo getUriInfo() {
+        return uriInfo;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java
new file mode 100644
index 00000000000..fa9da7fc8de
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/SignedUrlUriInfoTestFake.java
@@ -0,0 +1,40 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import edu.harvard.iq.dataverse.util.UrlSignerUtil;
+
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
+
+import java.net.URI;
+
+import static edu.harvard.iq.dataverse.util.UrlSignerUtil.SIGNED_URL_TOKEN;
+import static edu.harvard.iq.dataverse.util.UrlSignerUtil.SIGNED_URL_USER;
+import static jakarta.ws.rs.HttpMethod.GET;
+
+public class SignedUrlUriInfoTestFake extends UriInfoTestFake {
+
+    private final String signedUrlToken;
+    private final String signedUrlUserId;
+
+    private static final String SIGNED_URL_BASE_URL = "http://localhost:8080/api/test1";
+    private static final Integer SIGNED_URL_TIMEOUT = 1000;
+
+
+    public SignedUrlUriInfoTestFake(String signedUrlToken, String signedUrlUserId) {
+        this.signedUrlToken = signedUrlToken;
+        this.signedUrlUserId = signedUrlUserId;
+    }
+
+    @Override
+    public URI getRequestUri() {
+        return URI.create(UrlSignerUtil.signUrl(SIGNED_URL_BASE_URL, SIGNED_URL_TIMEOUT, signedUrlUserId, GET, signedUrlToken));
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getQueryParameters() {
+        MultivaluedMap<String, String> queryParameters = new MultivaluedHashMap<>();
+        queryParameters.add(SIGNED_URL_TOKEN, signedUrlToken);
+        queryParameters.add(SIGNED_URL_USER, signedUrlUserId);
+        return queryParameters;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java
new file mode 100644
index 00000000000..51d20083ec8
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/UriInfoTestFake.java
@@ -0,0 +1,106 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.PathSegment;
+import jakarta.ws.rs.core.UriBuilder;
+import jakarta.ws.rs.core.UriInfo;
+import java.net.URI;
+import java.util.List;
+
+public class UriInfoTestFake implements UriInfo {
+
+    @Override
+    public String getPath() {
+        return null;
+    }
+
+    @Override
+    public String getPath(boolean b) {
+        return null;
+    }
+
+    @Override
+    public List<PathSegment> getPathSegments() {
+        return null;
+    }
+
+    @Override
+    public List<PathSegment> getPathSegments(boolean b) {
+        return null;
+    }
+
+    @Override
+    public URI getRequestUri() {
+        return null;
+    }
+
+    @Override
+    public UriBuilder getRequestUriBuilder() {
+        return null;
+    }
+
+    @Override
+    public URI getAbsolutePath() {
+        return null;
+    }
+
+    @Override
+    public UriBuilder getAbsolutePathBuilder() {
+        return null;
+    }
+
+    @Override
+    public URI getBaseUri() {
+        return null;
+    }
+
+    @Override
+    public UriBuilder getBaseUriBuilder() {
+        return null;
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getPathParameters() {
+        return null;
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getPathParameters(boolean b) {
+        return null;
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getQueryParameters() {
+        return null;
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getQueryParameters(boolean b) {
+        return null;
+    }
+
+    @Override
+    public List<String> getMatchedURIs() {
+        return null;
+    }
+
+    @Override
+    public List<String> getMatchedURIs(boolean b) {
+        return null;
+    }
+
+    @Override
+    public List<Object> getMatchedResources() {
+        return null;
+    }
+
+    @Override
+    public URI resolve(URI uri) {
+        return null;
+    }
+
+    @Override
+    public URI relativize(URI uri) {
+        return null;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java
new file mode 100644
index 00000000000..2679ab1cc1d
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyContainerRequestTestFake.java
@@ -0,0 +1,29 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.UriInfo;
+
+import static edu.harvard.iq.dataverse.api.auth.WorkflowKeyAuthMechanism.DATAVERSE_WORKFLOW_KEY_REQUEST_HEADER_NAME;
+
+public class WorkflowKeyContainerRequestTestFake extends ContainerRequestTestFake {
+
+    private final String workflowKey;
+    private final UriInfo uriInfo;
+
+    public WorkflowKeyContainerRequestTestFake(String workflowKey) {
+        this.workflowKey = workflowKey;
+        this.uriInfo = new WorkflowKeyUriInfoTestFake(workflowKey);
+    }
+
+    @Override
+    public UriInfo getUriInfo() {
+        return uriInfo;
+    }
+
+    @Override
+    public String getHeaderString(String s) {
+        if (s.equals(DATAVERSE_WORKFLOW_KEY_REQUEST_HEADER_NAME)) {
+            return this.workflowKey;
+        }
+        return null;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java
new file mode 100644
index 00000000000..7bfd4326417
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/auth/doubles/WorkflowKeyUriInfoTestFake.java
@@ -0,0 +1,22 @@
+package edu.harvard.iq.dataverse.api.auth.doubles;
+
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
+
+import static edu.harvard.iq.dataverse.api.auth.WorkflowKeyAuthMechanism.DATAVERSE_WORKFLOW_KEY_REQUEST_PARAM_NAME;
+
+public class WorkflowKeyUriInfoTestFake extends UriInfoTestFake {
+
+    private final String workflowKey;
+
+    public WorkflowKeyUriInfoTestFake(String workflowKey) {
+        this.workflowKey = workflowKey;
+    }
+
+    @Override
+    public MultivaluedMap<String, String> getQueryParameters() {
+        MultivaluedMap<String, String> queryParameters = new MultivaluedHashMap<>();
+        queryParameters.add(DATAVERSE_WORKFLOW_KEY_REQUEST_PARAM_NAME, workflowKey);
+        return queryParameters;
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java b/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java
index 45f0ef52b33..0202f11d469 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/dto/FieldDTOTest.java
@@ -11,12 +11,12 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import junit.framework.Assert;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  *
@@ -27,15 +27,15 @@ public class FieldDTOTest {
     public FieldDTOTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
        
         Set<FieldDTO> authorFields = new HashSet<>();
@@ -49,7 +49,7 @@ public void setUp() {
         
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
@@ -60,7 +60,7 @@ public void tearDown() {
     public void testSinglePrimitive() {
         FieldDTO affil = FieldDTO.createPrimitiveFieldDTO("authorAffiliation", "Top");
         System.out.println(affil.getSinglePrimitive());
-        Assert.assertEquals("Top", affil.getSinglePrimitive());
+        assertEquals("Top", affil.getSinglePrimitive());
         
     }
 
@@ -78,10 +78,10 @@ public void testMultipleVocab() {
         value.add("EventList");
         astroType.setMultipleVocab(value);
         
-        Assert.assertEquals(value, astroType.getMultipleVocab());
+        assertEquals(value, astroType.getMultipleVocab());
         String jsonStr = gson.toJson(astroType);
         FieldDTO astroType2 = gson.fromJson(jsonStr, FieldDTO.class);
-        Assert.assertEquals(astroType, astroType2);
+        assertEquals(astroType, astroType2);
         
     }
 
@@ -116,7 +116,7 @@ public void testSetMultipleCompound() {
         compoundField.setTypeName("author");
         compoundField.setMultipleCompound(authorList);
         
-        Assert.assertEquals(compoundField.getMultipleCompound(), authorList);
+        assertEquals(authorList, compoundField.getMultipleCompound());
     }
 
     /**
@@ -132,8 +132,8 @@ public void testSetSingleCompound() {
         
         FieldDTO compoundField = new FieldDTO();
         compoundField.setSingleCompound(authorFields.toArray(new FieldDTO[]{}));
-        Set<FieldDTO>  returned = compoundField.getSingleCompound();   
-        Assert.assertTrue(returned.equals(authorFields));
+        Set<FieldDTO> returned = compoundField.getSingleCompound();
+        assertEquals(authorFields, returned);
        
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java b/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java
index 7b9f7d5c155..3257204f460 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/filesystem/FileRecordJobIT.java
@@ -20,24 +20,25 @@
 */
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Response;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
 import edu.harvard.iq.dataverse.api.UtilIT;
 import edu.harvard.iq.dataverse.authorization.DataverseRole;
 import edu.harvard.iq.dataverse.batch.entities.JobExecutionEntity;
 import edu.harvard.iq.dataverse.batch.entities.StepExecutionEntity;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.Ignore;
-
-import javax.batch.runtime.BatchStatus;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import jakarta.batch.runtime.BatchStatus;
+
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileInputStream;
@@ -55,11 +56,11 @@
 import java.util.Random;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertTrue;
-import static junit.framework.Assert.fail;
+import static io.restassured.RestAssured.given;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.hamcrest.Matchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 /**
  * Batch File System Import Job Integration Tests
@@ -92,7 +93,7 @@ public class FileRecordJobIT {
     private static final String API_TOKEN_HTTP_HEADER = "X-Dataverse-key";
     private static final String BUILTIN_USER_KEY = "burrito";
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() throws Exception {
 
         // this allows for testing on dataverse staging servers via jvm setting
@@ -122,7 +123,7 @@ public static void setUpClass() throws Exception {
         }
     }
 
-    @Before
+    @BeforeEach
     public void setUpDataverse() {
 
         try {
@@ -189,12 +190,12 @@ public void setUpDataverse() {
         }
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         RestAssured.reset();
     }
 
-    @After
+    @AfterEach
     public void tearDownDataverse() {
         try {
 
@@ -231,7 +232,7 @@ public void tearDownDataverse() {
      * Ignores failed checksum manifest import.
      */
     @Test
-    @Ignore
+    @Disabled
     public void testSameFileInDifferentDirectories() {
 
         try {
@@ -310,7 +311,7 @@ public void testSameFileInDifferentDirectories() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     public void testNewEditor() {
 
         try {
@@ -414,7 +415,7 @@ public void testNewEditor() {
      * Ignores failed checksum manifest import.
      */
     @Test
-    @Ignore
+    @Disabled
     public void testSameFileInDifferentDirectoriesUnauthorizedUser() {
 
         try {
@@ -613,7 +614,7 @@ public void testSameFileInDifferentDirectoriesUnauthorizedUser() {
 //    }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Add a file in MERGE mode (default), should only need to commit the new file
      */
@@ -758,7 +759,7 @@ public void testAddingFilesInMergeMode() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * The success case: all files uploaded and present in checksum manifest
      */
@@ -827,7 +828,7 @@ public void testFilesWithChecksumManifest() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * No checksum manifest found
      */
@@ -881,7 +882,7 @@ public void testFilesWithoutChecksumManifest() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Checksum manifest is missing an uploaded file
      */
@@ -948,7 +949,7 @@ public void testFileMissingInChecksumManifest() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Checksum manifest references a file that isn't present, it should return failed status and detailed 
      * message in persistentUserData
@@ -1020,7 +1021,7 @@ public void testFileInChecksumManifestDoesntExist() {
     }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * Published datasets should not allow import jobs for now since it isn't in DRAFT mode
      */
@@ -1102,7 +1103,7 @@ public void testPublishedDataset() {
 //    }
 
     @Test
-    @Ignore
+    @Disabled
     /**
      * No dataset found responses (bad dataset id, etc.)
      */
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
index 70c53c8c9b9..44739f3f62a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/imports/ImportGenericServiceBeanTest.java
@@ -1,13 +1,14 @@
 package edu.harvard.iq.dataverse.api.imports;
 
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class ImportGenericServiceBeanTest {
 
     @InjectMocks
@@ -16,18 +17,18 @@ public class ImportGenericServiceBeanTest {
     @Test
     public void testReassignIdentifierAsGlobalId() {
         // non-URL
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("doi:10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("hdl:10.7910/DVN/TJCLKP", new DatasetDTO()));
         // HTTPS
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("https://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
         // HTTP (no S)
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
-        Assert.assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("doi:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://dx.doi.org/10.7910/DVN/TJCLKP", new DatasetDTO()));
+        assertEquals("hdl:10.7910/DVN/TJCLKP", importGenericService.reassignIdentifierAsGlobalId("http://hdl.handle.net/10.7910/DVN/TJCLKP", new DatasetDTO()));
         // junk
-        Assert.assertEquals(null, importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO()));
+        assertNull(importGenericService.reassignIdentifierAsGlobalId("junk", new DatasetDTO()));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java b/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java
index a6da689da7a..51586127041 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/util/JsonResponseBuilderTest.java
@@ -7,7 +7,7 @@
 import org.junit.jupiter.params.provider.ValueSource;
 import org.mockito.Mockito;
 
-import javax.servlet.http.HttpServletRequest;
+import jakarta.servlet.http.HttpServletRequest;
 
 import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.*;
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java
index a7a33d0c1bd..74f6d714a5e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthUtilTest.java
@@ -5,83 +5,57 @@
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.GoogleOAuth2AP;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.impl.OrcidOAuth2AP;
 import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-@RunWith(Enclosed.class)
 public class AuthUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class AuthUtilParamTests {
-
-        @Parameters
-        public static Collection<String[]> data() {
-            return Arrays.asList(
-                    new String[][] {
-                        { null, null, null },
-                        { "Homer", "Homer", null },
-                        { "Simpson", null, "Simpson" },
-                        { "Homer Simpson", "Homer", "Simpson" },
-                        { "Homer Simpson", " Homer", "Simpson" }
-                    }
-                );
-        }
-
-        @Parameter
-        public String expectedDisplayName;
-
-        @Parameter(1)
-        public String displayFirst;
-
-        @Parameter(2)
-        public String displayLast;
-
-        @Test
-        public void testGetDisplayName() {
-            assertEquals(expectedDisplayName, AuthUtil.getDisplayName(displayFirst, displayLast));
-        }
+    
+    @ParameterizedTest
+    @CsvSource(value = {
+        "NULL,NULL,NULL",
+        "Homer,Homer,NULL",
+        "Simpson,NULL,Simpson",
+        "Homer Simpson,Homer,Simpson",
+        "Homer Simpson,Homer,Simpson"
+    }, nullValues = "NULL")
+    void testGetDisplayName(String expectedDisplayName, String displayFirst, String displayLast) {
+        assertEquals(expectedDisplayName, AuthUtil.getDisplayName(displayFirst, displayLast));
     }
-
-    public static class AuthUtilNoParamTests {
-
-        /**
-         * Test of isNonLocalLoginEnabled method, of class AuthUtil.
-         */
-        @Test
-        public void testIsNonLocalLoginEnabled() {
-            System.out.println("isNonLocalLoginEnabled");
-
-            AuthUtil authUtil = new AuthUtil();
-
-            assertEquals(false, AuthUtil.isNonLocalLoginEnabled(null));
-
-            Collection<AuthenticationProvider> shibOnly = new HashSet<>();
-            shibOnly.add(new ShibAuthenticationProvider());
-            assertEquals(true, AuthUtil.isNonLocalLoginEnabled(shibOnly));
-
-            Collection<AuthenticationProvider> manyNonLocal = new HashSet<>();
-            manyNonLocal.add(new ShibAuthenticationProvider());
-            manyNonLocal.add(new GitHubOAuth2AP(null, null));
-            manyNonLocal.add(new GoogleOAuth2AP(null, null));
-            manyNonLocal.add(new OrcidOAuth2AP(null, null, null));
-            assertEquals(true, AuthUtil.isNonLocalLoginEnabled(manyNonLocal));
-
-            Collection<AuthenticationProvider> onlyBuiltin = new HashSet<>();
-            onlyBuiltin.add(new BuiltinAuthenticationProvider(null, null, null));
-            // only builtin provider
-            assertEquals(false, AuthUtil.isNonLocalLoginEnabled(onlyBuiltin));
-
-        }
+    
+    /**
+     * Test of isNonLocalLoginEnabled method, of class AuthUtil.
+     */
+    @Test
+    public void testIsNonLocalLoginEnabled() {
+        System.out.println("isNonLocalLoginEnabled");
+        
+        AuthUtil authUtil = new AuthUtil();
+        
+        assertFalse(AuthUtil.isNonLocalLoginEnabled(null));
+        
+        Collection<AuthenticationProvider> shibOnly = new HashSet<>();
+        shibOnly.add(new ShibAuthenticationProvider());
+        assertTrue(AuthUtil.isNonLocalLoginEnabled(shibOnly));
+        
+        Collection<AuthenticationProvider> manyNonLocal = new HashSet<>();
+        manyNonLocal.add(new ShibAuthenticationProvider());
+        manyNonLocal.add(new GitHubOAuth2AP(null, null));
+        manyNonLocal.add(new GoogleOAuth2AP(null, null));
+        manyNonLocal.add(new OrcidOAuth2AP(null, null, null));
+        assertTrue(AuthUtil.isNonLocalLoginEnabled(manyNonLocal));
+        
+        Collection<AuthenticationProvider> onlyBuiltin = new HashSet<>();
+        onlyBuiltin.add(new BuiltinAuthenticationProvider(null, null, null));
+        // only builtin provider
+        assertFalse(AuthUtil.isNonLocalLoginEnabled(onlyBuiltin));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java
index c22536e7616..4f04228df71 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/AuthenticatedUserDisplayInfoTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.authorization;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java
index ea9f851f9ed..27927c33420 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBeanTest.java
@@ -13,8 +13,9 @@
 import java.util.Set;
 import static java.util.stream.Collectors.toList;
 import java.util.stream.Stream;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
+
 import static edu.harvard.iq.dataverse.util.CollectionLiterals.*;
 
 /**
@@ -63,10 +64,10 @@ public void testFlattenGroupsCollection() throws GroupException {
                 
         List<Group> result = sut.flattenGroupsCollection(grps).collect(toList());
         
-        assertEquals( "Groups should appear only once", result.size(), new HashSet<>(result).size() );
+        assertEquals(result.size(), new HashSet<>(result).size(), "Groups should appear only once");
         
         grps.addAll( listOf(gAa, gAb, gAstar, AuthenticatedUsers.get()) );
-        assertEquals( "All groups should appear", grps, new HashSet<>(result) );
+        assertEquals(grps, new HashSet<>(result), "All groups should appear");
         
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java
index fdfd8d4370c..41a494d5e55 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/GroupUtilTest.java
@@ -5,8 +5,8 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import java.util.LinkedHashSet;
 import java.util.Set;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class GroupUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java
index 543d3ab1eeb..afa07be2e38 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupTest.java
@@ -16,10 +16,10 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mocks.MockRoleAssigneeServiceBean;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -34,15 +34,14 @@ public class ExplicitGroupTest {
     public ExplicitGroupTest() {
     }
     
-    @Test( expected=GroupException.class )
+    @Test
     public void addGroupToSelf() throws Exception {
         ExplicitGroup sut = new ExplicitGroup();
         sut.setDisplayName("a group");
-        sut.add( sut );
-        fail("A group cannot be added to itself.");
+        assertThrows(GroupException.class, () -> sut.add( sut ), "A group cannot be added to itself.");
     }
     
-    @Test( expected=GroupException.class )
+    @Test
     public void addGroupToDescendant() throws GroupException{
         Dataverse dv = makeDataverse();
         ExplicitGroup root = new ExplicitGroup(prv);
@@ -60,11 +59,10 @@ public void addGroupToDescendant() throws GroupException{
         
         sub.add( subSub );
         root.add( sub );
-        subSub.add(root);
-        fail("A group cannot contain its parent");
+        assertThrows(GroupException.class, () -> subSub.add(root), "A group cannot contain its parent");
     }
     
-    @Test( expected=GroupException.class )
+    @Test
     public void addGroupToUnrealtedGroup() throws GroupException {
         Dataverse dv1 = makeDataverse();
         Dataverse dv2 = makeDataverse();
@@ -73,9 +71,8 @@ public void addGroupToUnrealtedGroup() throws GroupException {
         g1.setOwner(dv1);
         g2.setOwner(dv2);
         
-        g1.add(g2);
-        fail("An explicit group cannot contain an explicit group defined in "
-                + "a dataverse that's not an ancestor of that group's owner dataverse.");
+        assertThrows(GroupException.class, () -> g1.add(g2), "An explicit group cannot contain an" +
+            "explicit group defined in a dataverse that's not an ancestor of that group's owner dataverse.");
         
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java
index b6a3b862435..aeea93ad29e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupTest.java
@@ -5,8 +5,9 @@
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java
index d03846a97b4..4683d66decd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv4AddressTest.java
@@ -1,10 +1,10 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
-import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.util.Arrays;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -22,9 +22,9 @@ public void testValueOf() {
         assertEquals( new IPv4Address(127,0,0,1), IPv4Address.valueOf("127.0.0.1") );
     }
     
-    @Test( expected=IllegalArgumentException.class )
-    public void testValueOf_bad() {
-        IPv4Address.valueOf("1.2.3");
+    @Test
+    void testValueOf_bad() {
+        assertThrows(IllegalArgumentException.class, () -> IPv4Address.valueOf("1.2.3"));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java
index 2070dc347e7..77618e558ec 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IPv6AddressTest.java
@@ -1,10 +1,11 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.util.Arrays;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -15,11 +16,11 @@ public class IPv6AddressTest {
     public IPv6AddressTest() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
@@ -37,15 +38,15 @@ public void testValueOfWithExpansion() {
         int[] expected = new int[]{0x2001, 0xdb8, 0x85a3, 0x0, 0, 0x8a2e, 0x370, 0x7334};
         IPv6Address adr = IPv6Address.valueOf("2001:db8:85a3::8a2e:370:7334");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0x2001, 0xdb8, 0x0, 0x0, 0x0, 0x0, 0x370, 0x7334};
         adr = IPv6Address.valueOf("2001:db8::370:7334");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -54,16 +55,16 @@ public void testValueOfWithExpansionZerosAtStart() {
         int[] expected = new int[]{0, 0, 0, 0, 0, 0x8a2e, 0x370, 0x7334};
         IPv6Address adr = IPv6Address.valueOf("::8a2e:370:7334");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0, 0, 0, 0, 0, 0, 0, 0x7334};
         adr = IPv6Address.valueOf("::7334");
         System.out.println("adr = " + adr);
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -72,15 +73,15 @@ public void testValueOfWithExpansionZerosAtEnd() {
         int[] expected = new int[]{0x2001, 0x8a2e, 0, 0, 0, 0, 0, 0};
         IPv6Address adr = IPv6Address.valueOf("2001:8a2e::");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0x1337, 0, 0, 0, 0, 0, 0, 0};
         adr = IPv6Address.valueOf("1337::");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -90,15 +91,15 @@ public void testValueOfWithExpansionSpecialCases() {
         IPv6Address adr = IPv6Address.valueOf("::");
         System.out.println("adr = " + adr);
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
 
         expected = new int[]{0, 0, 0, 0, 0, 0, 0, 1};
         adr = IPv6Address.valueOf("::1");
         for (int i = 0; i < 8; i++) {
-            assertEquals("At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i),
-                    expected[i], adr.get(i));
+            assertEquals(expected[i], adr.get(i),
+                "At index " + i + ": expecting " + expected[i] + ", got " + adr.get(i));
         }
     }
 
@@ -108,24 +109,24 @@ public void testLocalhostness() {
         assertFalse(IPv6Address.valueOf("fff::1").isLocalhost());
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLength() {
-        IPv6Address.valueOf("0:1:2:3");
+    @Test
+    void testIllegalLength() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf("0:1:2:3"));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLengthPrefix() {
-        IPv6Address.valueOf(":1:2:3");
+    @Test
+    void testIllegalLengthPrefix() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf(":1:2:3"));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalLengthSuffix() {
-        IPv6Address.valueOf("1:2:3:");
+    @Test
+    void testIllegalLengthSuffix() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf("1:2:3:"));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testIllegalNumber() {
-        IPv6Address.valueOf("::xxx");
+    @Test
+    void testIllegalNumber() {
+        assertThrows(IllegalArgumentException.class, () -> IPv6Address.valueOf("::xxx"));
     }
 
     @Test
@@ -150,8 +151,7 @@ public void testLongRoundTrips() {
                 "fe80::8358:c945:7094:2e6c",
                 "fe80::60d0:6eff:fece:7713", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff")) {
             IPv6Address addr = IPv6Address.valueOf(s);
-            assertEquals("Bad roundtrip on address: " + s,
-                    addr, new IPv6Address(addr.toLongArray()));
+            assertEquals(addr, new IPv6Address(addr.toLongArray()), "Bad roundtrip on address: " + s);
         }
     }
 
@@ -165,11 +165,9 @@ public void testInclusionAbove() {
                 "dd:a:a:a:a:a:b:a", "dd:a:a:a:a:a:a:b")) {
             IPv6Address ipv6 = IPv6Address.valueOf(addr);
             assertFalse(r.contains(ipv6));
-            assertTrue("for address " + ipv6, above(ipv6.toLongArray(),
-                    r.getTop().toLongArray()));
-            assertFalse("for address " + ipv6, between(r.getBottom().toLongArray(),
-                    r.getTop().toLongArray(),
-                    ipv6.toLongArray()));
+            assertTrue(above(ipv6.toLongArray(), r.getTop().toLongArray()), "for address " + ipv6);
+            assertFalse(between(r.getBottom().toLongArray(), r.getTop().toLongArray(), ipv6.toLongArray()),
+                "for address " + ipv6);
 
         }
     }
@@ -188,9 +186,8 @@ public void testInclusionBelow() {
             long[] bottomArr = r.getBottom().toLongArray();
             long[] addrArr = ipv6.toLongArray();
 
-            assertTrue("for address " + ipv6, above(bottomArr, addrArr));
-            assertFalse("for address " + ipv6, between(bottomArr,
-                    r.getTop().toLongArray(), addrArr));
+            assertTrue(above(bottomArr, addrArr), "for address " + ipv6);
+            assertFalse(between(bottomArr, r.getTop().toLongArray(), addrArr), "for address " + ipv6);
 
         }
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java
index f232b713640..e3134dedaef 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressRangeTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -83,7 +83,7 @@ public void testSingleAddress() {
     
     public void testRange( Boolean expected, IpAddressRange range, IpAddress... addresses ) {
         for ( IpAddress ipa : addresses ) {
-            assertEquals( "Testing " + ipa + " in " + range, expected, range.contains(ipa));
+            assertEquals(expected, range.contains(ipa), "Testing " + ipa + " in " + range);
         }
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java
index ce6ff29f1c1..e757472e316 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/ip/IpAddressTest.java
@@ -1,8 +1,9 @@
 package edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip;
 
 import java.util.Arrays;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java
index c260252f131..875cd02cc4c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/groups/impl/maildomain/MailDomainGroupServiceBeanTest.java
@@ -8,12 +8,11 @@
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
-import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.junit.jupiter.MockitoExtension;
 
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
 import java.util.*;
 import java.util.stream.Stream;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java
index ebf22f9dcb4..ff51260d43e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/BuiltinAuthenticationProviderTest.java
@@ -7,9 +7,9 @@
 import edu.harvard.iq.dataverse.mocks.MockBuiltinUserServiceBean;
 import edu.harvard.iq.dataverse.mocks.MockPasswordValidatorServiceBean;
 import edu.harvard.iq.dataverse.validation.PasswordValidatorServiceBean;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Before;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  *
@@ -22,7 +22,7 @@ public class BuiltinAuthenticationProviderTest {
     MockBuiltinUserServiceBean bean = null;
     AuthenticationServiceBean authBean = null;
     
-    @Before
+    @BeforeEach
     public void setup() {
         bean = new MockBuiltinUserServiceBean();
         passwordValidatorService = new MockPasswordValidatorServiceBean();
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java
index defbc4416b5..c81edd6d102 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/builtin/DataverseUserPageTest.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.authorization.providers.builtin;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import java.sql.Timestamp;
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java
index 5838fdee42c..ae73c505a4b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2AuthenticationProviderFactoryTest.java
@@ -2,8 +2,8 @@
 
 import java.util.HashMap;
 import java.util.Map;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
index 80249cc89e8..672d7563669 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/OAuth2LoginBackingBeanTest.java
@@ -19,10 +19,10 @@
 import org.mockito.junit.jupiter.MockitoExtension;
 import org.omnifaces.util.Faces;
 
-import javax.faces.context.ExternalContext;
-import javax.faces.context.FacesContext;
-import javax.faces.context.Flash;
-import javax.servlet.http.HttpServletRequest;
+import jakarta.faces.context.ExternalContext;
+import jakarta.faces.context.FacesContext;
+import jakarta.faces.context.Flash;
+import jakarta.servlet.http.HttpServletRequest;
 
 import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
 
@@ -105,6 +105,7 @@ class ecft {
         @Mock DataverseSession session;
         @Mock OAuth2TokenDataServiceBean oauth2Tokens;
         Optional<String> redirect = Optional.of("/hellotest");
+        String state;
         
         @BeforeEach
         void setUp() throws IOException {
@@ -118,7 +119,11 @@ void setUp() throws IOException {
             when(externalContextMock.getRequest()).thenReturn(requestMock);
             lenient().when(externalContextMock.getFlash()).thenReturn(flashMock);
             lenient().when(requestMock.getReader()).thenReturn(reader);
-            doReturn(loginBackingBean.createState(testIdp, this.redirect)).when(requestMock).getParameter("state");
+            
+            // Save the state as we need it for injection (necessary because of PKCE support)
+            state = loginBackingBean.createState(testIdp, this.redirect);
+            doReturn(state).when(requestMock).getParameter("state");
+            
             // travel in time at least 10 milliseconds (remote calls & redirects are much likely longer)
             // (if not doing this tests become flaky on fast machinas)
             loginBackingBean.clock = Clock.offset(constantClock, Duration.ofMillis(10));
@@ -140,7 +145,7 @@ void newUser() throws Exception {
             // fake the code received from the provider
             when(requestMock.getParameter("code")).thenReturn(code);
             // let's deep-fake the result of getUserRecord()
-            doReturn(userRecord).when(testIdp).getUserRecord(code, null);
+            doReturn(userRecord).when(testIdp).getUserRecord(code, state, null);
     
             // WHEN (& then)
             // capture the redirect target from the faces context
@@ -168,7 +173,7 @@ void existingUser() throws Exception {
             // fake the code received from the provider
             when(requestMock.getParameter("code")).thenReturn(code);
             // let's deep-fake the result of getUserRecord()
-            doReturn(userRecord).when(testIdp).getUserRecord(code, null);
+            doReturn(userRecord).when(testIdp).getUserRecord(code, state, null);
             doReturn(tokenData).when(userRecord).getTokenData();
             // also fake the result of the lookup in the auth service
             doReturn(userIdentifier).when(userRecord).getUserRecordIdentifier();
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java
index 786c30fb2d7..ed6b9789848 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GitHubOAuth2APTest.java
@@ -2,10 +2,10 @@
 
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class GitHubOAuth2APTest extends GitHubOAuth2AP {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java
index 5cb2788c3ee..cfba755d2a1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2APTest.java
@@ -2,9 +2,9 @@
 
 import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
 import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
-import javax.json.Json;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import jakarta.json.Json;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class GoogleOAuth2APTest extends GoogleOAuth2AP {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
new file mode 100644
index 00000000000..ee6823ef98a
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java
@@ -0,0 +1,249 @@
+package edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc;
+
+import com.nimbusds.oauth2.sdk.token.BearerAccessToken;
+import com.nimbusds.openid.connect.sdk.claims.UserInfo;
+import dasniko.testcontainers.keycloak.KeycloakContainer;
+import edu.harvard.iq.dataverse.UserServiceBean;
+import edu.harvard.iq.dataverse.api.auth.BearerTokenAuthMechanism;
+import edu.harvard.iq.dataverse.api.auth.doubles.BearerTokenKeyContainerRequestTestFake;
+import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
+import edu.harvard.iq.dataverse.authorization.UserRecordIdentifier;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2Exception;
+import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2UserRecord;
+import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.authorization.users.User;
+import edu.harvard.iq.dataverse.mocks.MockAuthenticatedUser;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import edu.harvard.iq.dataverse.util.testing.Tags;
+import org.htmlunit.FailingHttpStatusCodeException;
+import org.htmlunit.WebClient;
+import org.htmlunit.WebResponse;
+import org.htmlunit.html.HtmlForm;
+import org.htmlunit.html.HtmlInput;
+import org.htmlunit.html.HtmlPage;
+import org.htmlunit.html.HtmlSubmitInput;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.keycloak.OAuth2Constants;
+import org.keycloak.admin.client.Keycloak;
+import org.keycloak.admin.client.KeycloakBuilder;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientId;
+import static edu.harvard.iq.dataverse.authorization.providers.oauth2.oidc.OIDCAuthenticationProviderFactoryIT.clientSecret;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.mockito.Mockito.when;
+
+@Tag(Tags.INTEGRATION_TEST)
+@Tag(Tags.USES_TESTCONTAINERS)
+@Testcontainers(disabledWithoutDocker = true)
+@ExtendWith(MockitoExtension.class)
+// NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl()
+@LocalJvmSettings
+@JvmSetting(key = JvmSettings.OIDC_CLIENT_ID, value = clientId)
+@JvmSetting(key = JvmSettings.OIDC_CLIENT_SECRET, value = clientSecret)
+@JvmSetting(key = JvmSettings.OIDC_AUTH_SERVER_URL, method = "getAuthUrl")
+class OIDCAuthenticationProviderFactoryIT {
+    
+    static final String clientId = "test";
+    static final String clientSecret = "94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8";
+    static final String realm = "test";
+    static final String realmAdminUser = "admin";
+    static final String realmAdminPassword = "admin";
+    
+    static final String adminUser = "kcadmin";
+    static final String adminPassword = "kcpassword";
+    
+    // The realm JSON resides in conf/keycloak/test-realm.json and is made available here via <testResources> in pom.xml
+    @Container
+    static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:22.0")
+        .withRealmImportFile("keycloak/test-realm.json")
+        .withAdminUsername(adminUser)
+        .withAdminPassword(adminPassword);
+    
+    // Simple method to retrieve the issuer URL, referenced by the @JvmSetting annotations above (do not delete)
+    private static String getAuthUrl() {
+        return keycloakContainer.getAuthServerUrl() + "/realms/" + realm;
+    }
+    
+    OIDCAuthProvider getProvider() throws Exception {
+        OIDCAuthProvider oidcAuthProvider = (OIDCAuthProvider) OIDCAuthenticationProviderFactory.buildFromSettings();
+        
+        assumeTrue(oidcAuthProvider.getMetadata().getTokenEndpointURI().toString()
+            .startsWith(keycloakContainer.getAuthServerUrl()));
+        
+        return oidcAuthProvider;
+    }
+    
+    // NOTE: This requires the "direct access grants" for the client to be enabled!
+    String getBearerTokenViaKeycloakAdminClient() throws Exception {
+        try (Keycloak keycloak = KeycloakBuilder.builder()
+            .serverUrl(keycloakContainer.getAuthServerUrl())
+            .grantType(OAuth2Constants.PASSWORD)
+            .realm(realm)
+            .clientId(clientId)
+            .clientSecret(clientSecret)
+            .username(realmAdminUser)
+            .password(realmAdminPassword)
+            .scope("openid")
+            .build()) {
+            return keycloak.tokenManager().getAccessTokenString();
+        }
+    }
+    
+    /**
+     * This basic test covers configuring an OIDC provider via MPCONFIG and being able to use it.
+     */
+    @Test
+    void testCreateProvider() throws Exception {
+        // given
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        String token = getBearerTokenViaKeycloakAdminClient();
+        assumeFalse(token == null);
+        
+        Optional<UserInfo> info = Optional.empty();
+        
+        // when
+        try {
+            info = oidcAuthProvider.getUserInfo(new BearerAccessToken(token));
+        } catch (OAuth2Exception e) {
+            System.out.println(e.getMessageBody());
+        }
+        
+        //then
+        assertTrue(info.isPresent());
+        assertEquals(realmAdminUser, info.get().getPreferredUsername());
+    }
+    
+    @Mock
+    UserServiceBean userService;
+    @Mock
+    AuthenticationServiceBean authService;
+    
+    @InjectMocks
+    BearerTokenAuthMechanism bearerTokenAuthMechanism;
+    
+    /**
+     * This test covers using an OIDC provider as the authorization party when accessing the Dataverse API with a
+     * Bearer Token. See {@link BearerTokenAuthMechanism}. It needs to mock the auth services to avoid adding
+     * more dependencies.
+     */
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, varArgs = "api-bearer-auth", value = "true")
+    void testApiBearerAuth() throws Exception {
+        assumeFalse(userService == null);
+        assumeFalse(authService == null);
+        assumeFalse(bearerTokenAuthMechanism == null);
+        
+        // given
+        // Get the access token from the remote Keycloak in the container
+        String accessToken = getBearerTokenViaKeycloakAdminClient();
+        assumeFalse(accessToken == null);
+        
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        // This will also receive the details from the remote Keycloak in the container
+        UserRecordIdentifier identifier = oidcAuthProvider.getUserIdentifier(new BearerAccessToken(accessToken)).get();
+        String token = "Bearer " + accessToken;
+        BearerTokenKeyContainerRequestTestFake request = new BearerTokenKeyContainerRequestTestFake(token);
+        AuthenticatedUser user = new MockAuthenticatedUser();
+        
+        // setup mocks (we don't want or need a database here)
+        when(authService.getAuthenticationProviderIdsOfType(OIDCAuthProvider.class)).thenReturn(Set.of(oidcAuthProvider.getId()));
+        when(authService.getAuthenticationProvider(oidcAuthProvider.getId())).thenReturn(oidcAuthProvider);
+        when(authService.lookupUser(identifier)).thenReturn(user);
+        when(userService.updateLastApiUseTime(user)).thenReturn(user);
+        
+        // when (let's do this again, but now with the actual subject under test!)
+        User lookedUpUser = bearerTokenAuthMechanism.findUserFromRequest(request);
+        
+        // then
+        assertNotNull(lookedUpUser);
+        assertEquals(user, lookedUpUser);
+    }
+    
+    /**
+     * This test covers the {@link OIDCAuthProvider#buildAuthzUrl(String, String)} and
+     * {@link OIDCAuthProvider#getUserRecord(String, String, String)} methods that are used when
+     * a user authenticates via the JSF UI. It covers enabling PKCE, which is not a hard requirement
+     * of the protocol but might be required by some providers (as seen with Microsoft Azure AD).
+     * As we don't have a real browser, we use {@link WebClient} from HtmlUnit as a replacement.
+     */
+    @Test
+    @JvmSetting(key = JvmSettings.OIDC_PKCE_ENABLED, value = "true")
+    void testAuthorizationCodeFlowWithPKCE() throws Exception {
+        // given
+        String state = "foobar";
+        String callbackUrl = "http://localhost:8080/oauth2callback.xhtml";
+        
+        OIDCAuthProvider oidcAuthProvider = getProvider();
+        String authzUrl = oidcAuthProvider.buildAuthzUrl(state, callbackUrl);
+        //System.out.println(authzUrl);
+        
+        try (WebClient webClient = new WebClient()) {
+            webClient.getOptions().setCssEnabled(false);
+            webClient.getOptions().setJavaScriptEnabled(false);
+            // We *want* to know about the redirect, as it contains the data we need!
+            webClient.getOptions().setRedirectEnabled(false);
+            
+            HtmlPage loginPage = webClient.getPage(authzUrl);
+            assumeTrue(loginPage.getTitleText().contains("Sign in to " + realm));
+            
+            HtmlForm form = loginPage.getForms().get(0);
+            HtmlInput username = form.getInputByName("username");
+            HtmlInput password = form.getInputByName("password");
+            HtmlSubmitInput submit = form.getInputByName("login");
+            
+            username.type(realmAdminUser);
+            password.type(realmAdminPassword);
+            
+            FailingHttpStatusCodeException exception = assertThrows(FailingHttpStatusCodeException.class, submit::click);
+            assertEquals(302, exception.getStatusCode());
+            
+            WebResponse response = exception.getResponse();
+            assertNotNull(response);
+            
+            String callbackLocation = response.getResponseHeaderValue("Location");
+            assertTrue(callbackLocation.startsWith(callbackUrl));
+            //System.out.println(callbackLocation);
+            
+            String queryPart = callbackLocation.trim().split("\\?")[1];
+            Map<String,String> parameters = Pattern.compile("\\s*&\\s*")
+                .splitAsStream(queryPart)
+                .map(s -> s.split("=", 2))
+                .collect(Collectors.toMap(a -> a[0], a -> a.length > 1 ? a[1]: ""));
+            //System.out.println(parameters);
+            assertTrue(parameters.containsKey("code"));
+            assertTrue(parameters.containsKey("state"));
+            
+            OAuth2UserRecord userRecord = oidcAuthProvider.getUserRecord(
+                parameters.get("code"),
+                parameters.get("state"),
+                callbackUrl
+            );
+            
+            assertNotNull(userRecord);
+            assertEquals(realmAdminUser, userRecord.getUsername());
+        } catch (OAuth2Exception e) {
+            System.out.println(e.getMessageBody());
+            throw e;
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java
index 58d32b7d2a1..9ace90ac496 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/shib/ShibUtilTest.java
@@ -3,74 +3,46 @@
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.Map;
 import java.util.UUID;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.*;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import jakarta.servlet.http.HttpServletRequest;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
-@RunWith(Enclosed.class)
 public class ShibUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class ShibUtilParamTest {
-
-        @Parameters
-        public static Collection<String[]> data() {
-            return Arrays.asList(new String[][] {
-                { "John", "Harvard", "John", "Harvard", null },
-                { "Guido", "van Rossum", "Guido", "van Rossum", null },
-                { "Philip Seymour", "Hoffman", "Philip Seymour", "Hoffman", "Philip Seymour Hoffman" },
-                { "Edward", "Cummings", "Edward;e e", "Cummings", null },
-                { "Edward", "Cummings", "Edward;e e", "Cummings", "e e cummings" },
-                { "Anthony", "Stark", "Tony;Anthony", "Stark", null },
-                { "Anthony", "Stark", "Anthony;Tony", "Stark", null },
-                { "Antoni", "Gaudí", "Antoni", "Gaudí i Cornet;Gaudí", null },
-                { "Jane", "Doe", null, null, "Jane Doe" },
-                /**
-                * @todo Make findBestFirstAndLastName smart enough to know that the last name
-                *       should be "Hoffman" rather than "Seymour".
-                */
-                { "Philip", "Seymour", null, null, "Philip Seymour Hoffman" },
-                { null, null, null, null, "" }
-            });
-        }
-
-        @Parameter
-        public String expectedFirstName;
-
-        @Parameter(1)
-        public String expectedLastName;
-
-        @Parameter(2)
-        public String actualFirstName;
-
-        @Parameter(3)
-        public String actualLastName;
-
-        @Parameter(4)
-        public String actualDisplayName;
-
-        @Test
-        public void testFindBestFirstAndLastName() {
-
-            // ShibUserNameFields expected1 = new ShibUserNameFields("John", "Harvard");
-            ShibUserNameFields actualValues = ShibUtil.findBestFirstAndLastName(actualFirstName, actualLastName, actualDisplayName);
-            assertEquals(expectedFirstName, actualValues.getFirstName());
-            assertEquals(expectedLastName, actualValues.getLastName());
-        }
+    
+    @ParameterizedTest
+    @CsvSource(value = {
+        "John,Harvard,John,Harvard,NULL",
+        "Guido,van Rossum,Guido,van Rossum,NULL",
+        "Philip Seymour,Hoffman,Philip Seymour,Hoffman,Philip Seymour Hoffman",
+        "Edward,Cummings,Edward;e e,Cummings,NULL",
+        "Edward,Cummings,Edward;e e,Cummings,e e cummings",
+        "Anthony,Stark,Tony;Anthony,Stark,NULL",
+        "Anthony,Stark,Anthony;Tony,Stark,NULL",
+        "Antoni,Gaudí,Antoni,Gaudí i Cornet;Gaudí,NULL",
+        "Jane,Doe,NULL,NULL,Jane Doe",
+        /**
+         * @todo Make findBestFirstAndLastName smart enough to know that the last name
+         *       should be "Hoffman" rather than "Seymour".
+         */
+        "Philip,Seymour,NULL,NULL,Philip Seymour Hoffman",
+        "NULL,NULL,NULL,NULL,EMPTY"
+    }, nullValues = "NULL", emptyValue = "EMPTY")
+    void testFindBestFirstAndLastName(String expectedFirstName, String expectedLastName, String actualFirstName,
+                                      String actualLastName, String actualDisplayName) {
+        // ShibUserNameFields expected1 = new ShibUserNameFields("John", "Harvard");
+        ShibUserNameFields actualValues = ShibUtil.findBestFirstAndLastName(actualFirstName, actualLastName, actualDisplayName);
+        assertEquals(expectedFirstName, actualValues.getFirstName());
+        assertEquals(expectedLastName, actualValues.getLastName());
     }
+    
 
     public static class ShibUtilNoParamTest {
 
@@ -130,16 +102,16 @@ public void testFindSingleValue() {
         }
 
         @Test
-        public void testGenerateFriendlyLookingUserIdentifer() {
+        public void testGenerateFriendlyLookingUserIdentifier() {
             int lengthOfUuid = UUID.randomUUID().toString().length();
-            assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifer("uid1", null));
-            assertEquals(" leadingWhiteSpace", ShibUtil.generateFriendlyLookingUserIdentifer(" leadingWhiteSpace", null));
-            assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifer("uid1", "email1@example.com"));
-            assertEquals("email1", ShibUtil.generateFriendlyLookingUserIdentifer(null, "email1@example.com"));
-            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, null).length());
-            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, "").length());
-            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer("", null).length());
-            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifer(null, "junkEmailAddress").length());
+            assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifier("uid1", null));
+            assertEquals(" leadingWhiteSpace", ShibUtil.generateFriendlyLookingUserIdentifier(" leadingWhiteSpace", null));
+            assertEquals("uid1", ShibUtil.generateFriendlyLookingUserIdentifier("uid1", "email1@example.com"));
+            assertEquals("email1", ShibUtil.generateFriendlyLookingUserIdentifier(null, "email1@example.com"));
+            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, null).length());
+            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, "").length());
+            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier("", null).length());
+            assertEquals(lengthOfUuid, ShibUtil.generateFriendlyLookingUserIdentifier(null, "junkEmailAddress").length());
         }
 
         @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
index 5606bbe6aa3..7bd802b3b02 100644
--- a/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/authorization/users/AuthenticatedUserTest.java
@@ -18,23 +18,24 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Before;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import jakarta.json.JsonObject;
+import jakarta.json.JsonString;
 
-import javax.json.JsonObject;
-import javax.json.JsonString;
-import org.junit.runner.RunWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 /**
  * Tested class: AuthenticatedUser.java
  *
  * @author bsilverstein
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class AuthenticatedUserTest {
 
     @Mock
@@ -51,7 +52,7 @@ public AuthenticatedUserTest() {
     public static final String IDENTIFIER_PREFIX = "@";
     public static final Set<Type> mutedTypes = EnumSet.of(Type.ASSIGNROLE, Type.REVOKEROLE);
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testUser = MocksFactory.makeAuthenticatedUser("Homer", "Simpson");
         expResult = testUser.getCreatedTime();
@@ -363,14 +364,14 @@ public void testMutingInJson() {
         JsonObject jObject = testUser.toJson().build();
 
         Set<String> mutedEmails = new HashSet<>(jObject.getJsonArray("mutedEmails").getValuesAs(JsonString::getString));
-        assertTrue("Set contains two elements", mutedEmails.size() == 2);
-        assertTrue("Set contains REVOKEROLE", mutedEmails.contains("REVOKEROLE"));
-        assertTrue("Set contains ASSIGNROLE", mutedEmails.contains("ASSIGNROLE"));
+        assertTrue(mutedEmails.size() == 2, "Set contains two elements");
+        assertTrue(mutedEmails.contains("REVOKEROLE"), "Set contains REVOKEROLE");
+        assertTrue(mutedEmails.contains("ASSIGNROLE"), "Set contains ASSIGNROLE");
 
         Set<String> mutedNotifications = new HashSet<>(jObject.getJsonArray("mutedNotifications").getValuesAs(JsonString::getString));
-        assertTrue("Set contains two elements", mutedNotifications.size() == 2);
-        assertTrue("Set contains REVOKEROLE", mutedNotifications.contains("REVOKEROLE"));
-        assertTrue("Set contains ASSIGNROLE", mutedNotifications.contains("ASSIGNROLE"));
+        assertTrue(mutedNotifications.size() == 2, "Set contains two elements");
+        assertTrue(mutedNotifications.contains("REVOKEROLE"), "Set contains REVOKEROLE");
+        assertTrue(mutedNotifications.contains("ASSIGNROLE"), "Set contains ASSIGNROLE");
     }
 
     @Test
@@ -400,10 +401,10 @@ public void testTypeTokenizer() {
                 Type.tokenizeToSet(" ASSIGNROLE , CREATEDV,REVOKEROLE  ")
             )
         );
-        assertTrue("typeSet contains 3 elements", typeSet.size() == 3);
-        assertTrue("typeSet contains ASSIGNROLE", typeSet.contains(Type.ASSIGNROLE));
-        assertTrue("typeSet contains CREATEDV", typeSet.contains(Type.CREATEDV));
-        assertTrue("typeSet contains REVOKEROLE", typeSet.contains(Type.REVOKEROLE));
+        assertTrue(typeSet.size() == 3, "typeSet contains 3 elements");
+        assertTrue(typeSet.contains(Type.ASSIGNROLE), "typeSet contains ASSIGNROLE");
+        assertTrue(typeSet.contains(Type.CREATEDV), "typeSet contains CREATEDV");
+        assertTrue(typeSet.contains(Type.REVOKEROLE), "typeSet contains REVOKEROLE");
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
index 95deafc0cfe..2b526b8a449 100644
--- a/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/branding/BrandingUtilTest.java
@@ -7,8 +7,8 @@
 import java.util.Arrays;
 import java.util.logging.Logger;
 import java.util.stream.Stream;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.junit.jupiter.api.AfterAll;
diff --git a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java
index 45c4162188a..10de20239e5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailDataTest.java
@@ -1,11 +1,11 @@
 package edu.harvard.iq.dataverse.confirmemail;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 
@@ -14,13 +14,13 @@ public class ConfirmEmailDataTest {
     private ConfirmEmailData instance;
     private AuthenticatedUser user;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.user = new AuthenticatedUser();
         this.instance = new ConfirmEmailData(user, 60);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         this.instance = null;
         this.user = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java
index 8fdc7dc38d5..43795f0114f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/confirmemail/ConfirmEmailUtilTest.java
@@ -1,61 +1,41 @@
 package edu.harvard.iq.dataverse.confirmemail;
 
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
 import java.sql.Timestamp;
-import java.util.Collection;
-import java.util.Arrays;
+import java.util.stream.Stream;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-@RunWith(Enclosed.class)
 public class ConfirmEmailUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class ConfirmEmailUtilParamTest {
-
-        public String timeAsFriendlyString;
-        public int timeInMinutes;
-
-        public ConfirmEmailUtilParamTest(String timeAsFriendlyString, int timeInSeconds) {
-            this.timeAsFriendlyString = timeAsFriendlyString;
-            this.timeInMinutes = timeInSeconds;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { "48 hours", 2880 }, 
-                        { "24 hours", 1440 },
-                        { "2.75 hours", 165 },
-                        { "2.5 hours", 150 },
-                        { "1.5 hours", 90 }, 
-                        { "1 hour", 60 }, 
-                        { "30 minutes", 30 }, 
-                        { "1 minute", 1 } 
-                    }
-            );
-        }
-
-        @Test
-        public void friendlyExpirationTimeTest() {
-            assertEquals(timeAsFriendlyString, ConfirmEmailUtil.friendlyExpirationTime(timeInMinutes));
-        }
+    
+    static Stream<Arguments> dataPoints() {
+        return Stream.of(
+            Arguments.of("48 hours", 2880),
+            Arguments.of("24 hours", 1440),
+            Arguments.of("2.75 hours", 165),
+            Arguments.of("2.5 hours", 150),
+            Arguments.of("1.5 hours", 90),
+            Arguments.of("1 hour", 60),
+            Arguments.of("30 minutes", 30),
+            Arguments.of("1 minute", 1)
+        );
     }
-
-    public static class ConfirmEmailUtilNoParamTest {
-
-        @Test
-        public void testGrandfatheredTime() {
-            System.out.println();
-            System.out.println("Grandfathered account timestamp test");
-            System.out.println("Grandfathered Time (y2k): " + ConfirmEmailUtil.getGrandfatheredTime());
-            assertEquals(Timestamp.valueOf("2000-01-01 00:00:00.0"), ConfirmEmailUtil.getGrandfatheredTime());
-            System.out.println();
-        }
+    
+    @ParameterizedTest
+    @MethodSource("dataPoints")
+    void friendlyExpirationTimeTest(String timeAsFriendlyString, int timeInMinutes) {
+        assertEquals(timeAsFriendlyString, ConfirmEmailUtil.friendlyExpirationTime(timeInMinutes));
+    }
+    
+    @Test
+    void testGrandfatheredTime() {
+        //System.out.println("Grandfathered account timestamp test");
+        //System.out.println("Grandfathered Time (y2k): " + ConfirmEmailUtil.getGrandfatheredTime());
+        assertEquals(Timestamp.valueOf("2000-01-01 00:00:00.0"), ConfirmEmailUtil.getGrandfatheredTime());
+        //System.out.println();
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java
index 1ff914adff9..f7ce061fb24 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/DataAccessTest.java
@@ -59,4 +59,24 @@ void testCreateNewStorageIO_createsFileAccessIObyDefault() throws IOException {
     StorageIO<Dataset> storageIo = DataAccess.createNewStorageIO(dataset, "valid-tag");
     assertTrue(storageIo.getClass().equals(FileAccessIO.class));
   }
+  
+  @Test
+  void testGetLocationFromStorageId() {
+      Dataset d = new Dataset();
+      d.setAuthority("10.5072");
+      d.setIdentifier("FK2/ABCDEF");
+      assertEquals("s3://10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece",
+              DataAccess.getLocationFromStorageId("s3://18b39722140-50eb7d3c5ece", d));
+      assertEquals("10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece",
+              DataAccess.getLocationFromStorageId("18b39722140-50eb7d3c5ece", d));
+
+  }
+  
+  @Test
+  void testGetStorageIdFromLocation() {
+      assertEquals("file://18b39722140-50eb7d3c5ece",
+              DataAccess.getStorageIdFromLocation("file://10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece"));
+      assertEquals("s3://18b39722140-50eb7d3c5ece",
+              DataAccess.getStorageIdFromLocation("s3://bucketname:10.5072/FK2/ABCDEF/18b39722140-50eb7d3c5ece"));
+  }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
index 95621dd8750..552d76b74e8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/FileAccessIOTest.java
@@ -23,13 +23,13 @@
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  *
@@ -51,7 +51,7 @@ public class FileAccessIOTest {
     public FileAccessIOTest() {
     }
 
-    @Before
+    @BeforeEach
     public void setUpClass() throws IOException {
         dataverse = MocksFactory.makeDataverse();
         dataset = MocksFactory.makeDataset();
@@ -77,7 +77,7 @@ public void setUpClass() throws IOException {
         }
     }
 
-    @After
+    @AfterEach
     public void tearDownClass() throws IOException {
         FileUtils.deleteDirectory(new File("/tmp/files/"));
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java
new file mode 100644
index 00000000000..ad980aa28cd
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/GlobusOverlayAccessIOTest.java
@@ -0,0 +1,151 @@
+/*
+ * SPDX-License-Identifier: Apache 2.0
+ */
+package edu.harvard.iq.dataverse.dataaccess;
+
+import edu.harvard.iq.dataverse.DOIServiceBean;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.mocks.MocksFactory;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import static org.junit.jupiter.api.Assertions.*;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.mockito.junit.jupiter.MockitoSettings;
+import org.mockito.quality.Strictness;
+import java.io.IOException;
+import java.nio.file.Paths;
+
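+// Exercises identifier validation, driver lookup, and storage-location/aux-path resolution for a managed
+// ("globusm") and a remote ("globusr") Globus store, configured via system properties for the test.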
+@ExtendWith(MockitoExtension.class)
+@MockitoSettings(strictness = Strictness.STRICT_STUBS)
+public class GlobusOverlayAccessIOTest {
+
+    @Mock
+    private Dataset dataset;
+    private DataFile mDatafile;
+    private DataFile rDatafile;
+    private String baseStoreId1 = "182ad2bda2f-c3508e719076";
+    private String baseStoreId2 = "182ad2bda2f-c3508e719077";
+    private String logoPath = "d7c42580-6538-4605-9ad8-116a61982644/hdc1/image002.mrc";
+    private String authority = "10.5072";
+    private String identifier = "F2ABCDEF";
+
+    @BeforeAll
+    public static void setUp() {
+        // Base Store
+        System.setProperty("dataverse.files.base.type", DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER);
+        System.setProperty("dataverse.files.base.label", "default");
+        System.setProperty("dataverse.files.base.directory", "/tmp/files");
+
+        // Managed Globus Store
+
+        // Nonsense endpoint/paths
+        System.setProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH,
+                "d7c42580-6538-4605-9ad8-116a61982644/hdc1");
+        // Nonsense value of the right form
+        System.setProperty("dataverse.files.globusm.globus-token",
+                "NzM2NTQxMDMtOTg1Yy00NDgzLWE1MTYtYTJlNDk0ZmI3MDhkOkpJZGZaZGxMZStQNUo3MTRIMDY2cDh6YzIrOXI2RmMrbFR6UG0zcSsycjA9");
+        System.setProperty("dataverse.files.globusm.remote-store-name", "GlobusEndpoint1");
+        System.setProperty("dataverse.files.globusm.type", "globus");
+        System.setProperty("dataverse.files.globusm.managed", "true");
+        System.setProperty("dataverse.files.globusm.base-store", "base");
+        System.setProperty("dataverse.files.globusm.label", "globusManaged");
+
+        // Remote Store
+        System.setProperty("dataverse.files.globusr.type", "globus");
+        System.setProperty("dataverse.files.globusr.base-store", "base");
+        System.setProperty("dataverse.files.globusr.managed", "false");
+        System.setProperty("dataverse.files.globusr.label", "globusRemote");
+        System.setProperty(
+                "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS,
+                "d7c42580-6538-4605-9ad8-116a61982644/hdc1");
+        System.setProperty("dataverse.files.globusr.remote-store-name", "DemoDataCorp");
+
+    }
+
+    @AfterAll
+    public static void tearDown() {
+        System.clearProperty("dataverse.files.base.type");
+        System.clearProperty("dataverse.files.base.label");
+        System.clearProperty("dataverse.files.base.directory");
+        System.clearProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH);
+        System.clearProperty("dataverse.files.globusm.globus-token");
+        System.clearProperty("dataverse.files.globusm.remote-store-name");
+        System.clearProperty("dataverse.files.globusm.type");
+        System.clearProperty("dataverse.files.globusm.managed");
+        System.clearProperty("dataverse.files.globusm.base-store");
+        System.clearProperty("dataverse.files.globusm.label");
+        System.clearProperty("dataverse.files.globusr.type");
+        System.clearProperty("dataverse.files.globusr.base-store");
+        System.clearProperty("dataverse.files.globusr.managed");
+        System.clearProperty("dataverse.files.globusm.label");
+        System.clearProperty(
+                "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS);
+        System.clearProperty("dataverse.files.globusr.remote-store-name");
+    }
+
+    @Test
+    void testGlobusOverlayIdentifiers() throws IOException {
+
+        dataset = MocksFactory.makeDataset();
+        dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/",
+                DOIServiceBean.DOI_RESOLVER_URL, null));
+        mDatafile = MocksFactory.makeDataFile();
+        mDatafile.setOwner(dataset);
+        mDatafile.setStorageIdentifier("globusm://" + baseStoreId1);
+
+        rDatafile = MocksFactory.makeDataFile();
+        rDatafile.setOwner(dataset);
+        rDatafile.setStorageIdentifier("globusr://" + baseStoreId2 + "//" + logoPath);
+
+        assertTrue(GlobusOverlayAccessIO.isValidIdentifier("globusm", mDatafile.getStorageIdentifier()));
+        assertTrue(GlobusOverlayAccessIO.isValidIdentifier("globusr", rDatafile.getStorageIdentifier()));
+        assertFalse(GlobusOverlayAccessIO.isValidIdentifier("globusm", "globusr://localid//../of/the/hill"));
+        assertFalse(GlobusOverlayAccessIO.isValidIdentifier("globusr",
+                rDatafile.getStorageIdentifier().replace("hdc1", "")));
+
+        // We can read the storageIdentifier and get the driver
+        assertTrue(mDatafile.getStorageIdentifier()
+                .startsWith(DataAccess.getStorageDriverFromIdentifier(mDatafile.getStorageIdentifier())));
+        assertTrue(rDatafile.getStorageIdentifier()
+                .startsWith(DataAccess.getStorageDriverFromIdentifier(rDatafile.getStorageIdentifier())));
+
+        // We can get the driver type from its ID
+        assertTrue(DataAccess.getDriverType("globusm").equals(System.getProperty("dataverse.files.globusm.type")));
+        assertTrue(DataAccess.getDriverType("globusr").equals(System.getProperty("dataverse.files.globusr.type")));
+
+        // When we get a StorageIO for the file, it is the right type
+        StorageIO<DataFile> mStorageIO = DataAccess.getStorageIO(mDatafile);
+        assertTrue(mStorageIO instanceof GlobusOverlayAccessIO);
+        StorageIO<DataFile> rStorageIO = DataAccess.getStorageIO(rDatafile);
+        assertTrue(rStorageIO instanceof GlobusOverlayAccessIO);
+
+        // When we use it, we can get properties like the remote store name
+        assertTrue(mStorageIO.getRemoteStoreName()
+                .equals(System.getProperty("dataverse.files.globusm.remote-store-name")));
+        assertTrue(rStorageIO.getRemoteStoreName()
+                .equals(System.getProperty("dataverse.files.globusr.remote-store-name")));
+
+        // Storage Locations are correct
+        String mLocation = mStorageIO.getStorageLocation();
+        assertEquals("globusm:///" + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage()
+                + "/" + baseStoreId1, mLocation);
+        String rLocation = rStorageIO.getStorageLocation();
+        assertEquals("globusr://" + baseStoreId2 + "//" + logoPath, rLocation);
+
+        // If we ask for the path for an aux file, it is correct
+        System.out.println(Paths.get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority,
+                identifier, baseStoreId1 + ".auxobject").toString());
+        System.out.println(mStorageIO.getAuxObjectAsPath("auxobject").toString());
+        assertTrue(Paths.get(System.getProperty("dataverse.files.base.directory", "/tmp/files"), authority, identifier,
+                baseStoreId1 + ".auxobject").equals(mStorageIO.getAuxObjectAsPath("auxobject")));
+        assertTrue(Paths.get(System.getProperty("dataverse.files.base.directory", "/tmp/files"), authority, identifier,
+                baseStoreId2 + ".auxobject").equals(rStorageIO.getAuxObjectAsPath("auxobject")));
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java
index f66b3306dda..1c371881ba6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/RemoteOverlayAccessIOTest.java
@@ -4,6 +4,7 @@
  */
 package edu.harvard.iq.dataverse.dataaccess;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.GlobalId;
@@ -33,7 +34,8 @@ public class RemoteOverlayAccessIOTest {
     private DataFile badDatafile;
     private String baseStoreId="182ad2bda2f-c3508e719076";
     private String logoPath = "images/dataverse_project_logo.svg";
-    private String pid = "10.5072/F2/ABCDEF";
+    private String authority = "10.5072";
+    private String identifier = "F2/ABCDEF";
 
     @BeforeEach
     public void setUp() {
@@ -48,7 +50,7 @@ public void setUp() {
         System.setProperty("dataverse.files.file.label", "default");
         datafile = MocksFactory.makeDataFile();
         dataset = MocksFactory.makeDataset();
-        dataset.setGlobalId(GlobalId.parse("doi:" + pid).get());
+        dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/", DOIServiceBean.DOI_RESOLVER_URL, null));
         datafile.setOwner(dataset);
         datafile.setStorageIdentifier("test://" + baseStoreId + "//" + logoPath);
 
@@ -101,10 +103,10 @@ void testRemoteOverlayFiles() throws IOException {
         assertTrue(remoteIO.getSize() > 0);
         // If we ask for the path for an aux file, it is correct
         System.out.println(Paths
-                .get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), pid, baseStoreId + ".auxobject").toString());
+                .get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, identifier, baseStoreId + ".auxobject").toString());
         System.out.println(remoteIO.getAuxObjectAsPath("auxobject").toString());
         assertTrue(Paths
-                .get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), pid, baseStoreId + ".auxobject")
+                .get(System.getProperty("dataverse.files.file.directory", "/tmp/files"), authority, identifier, baseStoreId + ".auxobject")
                 .equals(remoteIO.getAuxObjectAsPath("auxobject")));
         IOException thrown = assertThrows(IOException.class, () -> DataAccess.getStorageIO(badDatafile),
                 "Expected getStorageIO() to throw, but it didn't");
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOLocalstackIT.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOLocalstackIT.java
new file mode 100644
index 00000000000..140b558fc1d
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIOLocalstackIT.java
@@ -0,0 +1,153 @@
+package edu.harvard.iq.dataverse.dataaccess;
+
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DvObject;
+import edu.harvard.iq.dataverse.util.FileUtil;
+import edu.harvard.iq.dataverse.util.testing.Tags;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Scanner;
+import java.util.UUID;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.testcontainers.containers.localstack.LocalStackContainer;
+import static org.testcontainers.containers.localstack.LocalStackContainer.Service.S3;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.utility.DockerImageName;
+
+// https://java.testcontainers.org/modules/localstack/
+@Tag(Tags.INTEGRATION_TEST)
+@Tag(Tags.USES_TESTCONTAINERS)
+@Testcontainers(disabledWithoutDocker = true)
+@ExtendWith(MockitoExtension.class)
+class S3AccessIOLocalstackIT {
+
+    @BeforeAll
+    static void setUp() {
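+        // Point the "si1" storage driver at the LocalStack S3 endpoint, then build a client and create the test bucket.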
+        System.setProperty(staticFiles + "access-key", localstack.getAccessKey());
+        System.setProperty(staticFiles + "secret-key", localstack.getSecretKey());
+        System.setProperty(staticFiles + "custom-endpoint-url", localstack.getEndpoint().toString());
+        System.setProperty(staticFiles + "custom-endpoint-region", localstack.getRegion());
+        System.setProperty(staticFiles + "bucket-name", bucketName);
+
+        s3 = AmazonS3ClientBuilder
+                .standard()
+                .withEndpointConfiguration(
+                        new AwsClientBuilder.EndpointConfiguration(
+                                localstack.getEndpoint().toString(),
+                                localstack.getRegion()
+                        )
+                )
+                .withCredentials(
+                        new AWSStaticCredentialsProvider(
+                                new BasicAWSCredentials(localstack.getAccessKey(), localstack.getSecretKey())
+                        )
+                )
+                .build();
+        s3.createBucket(bucketName);
+    }
+
+    static final String storageDriverId = "si1";
+    static final String staticFiles = "dataverse.files." + storageDriverId + ".";
+    static final String bucketName = "bucket-" + UUID.randomUUID().toString();
+    static AmazonS3 s3 = null;
+
+    static DockerImageName localstackImage = DockerImageName.parse("localstack/localstack:2.3.2");
+    @Container
+    static LocalStackContainer localstack = new LocalStackContainer(localstackImage)
+            .withServices(S3);
+
+    // Round-trip: save a short string through S3AccessIO and read it back from the LocalStack bucket.
+    @Test
+    void test1() {
+        DvObject dvObject = new Dataset();
+        dvObject.setProtocol("doi");
+        dvObject.setAuthority("10.5072/FK2");
+        dvObject.setIdentifier("ABC123");
+        DataAccessRequest req = null;
+        S3AccessIO s3AccessIO = new S3AccessIO<>(dvObject, req, storageDriverId);
+        String textIn = "Hello";
+        InputStream inputStream = new ByteArrayInputStream(textIn.getBytes());
+        // Without this temp directory, saveInputStream fails
+        String tempDirPath = "/tmp/dataverse/temp";
+        try {
+            Files.createDirectories(Paths.get(tempDirPath));
+        } catch (IOException ex) {
+            System.out.println("failed to create " + tempDirPath + ": " + ex);
+        }
+        try {
+            s3AccessIO.saveInputStream(inputStream);
+            System.out.println("save complete!");
+        } catch (IOException ex) {
+            System.out.println("saveInputStream exception: " + ex);
+        }
+
+        String textOut = null;
+        try {
+            textOut = new Scanner(s3AccessIO.getInputStream()).useDelimiter("\\A").next();
+        } catch (IOException ex) {
+            System.out.println("getInputStream exception: " + ex);
+        }
+        assertEquals(textIn, textOut);
+    }
+
+    // testing a specific constructor
+    @Test
+    void test2() {
+        Dataset dataset = new Dataset();
+        dataset.setProtocol("doi");
+        dataset.setAuthority("10.5072/FK2");
+        dataset.setIdentifier("ABC123");
+        String sid = bucketName + dataset.getAuthorityForFileStorage() + "/" + dataset.getIdentifierForFileStorage() + "/" + FileUtil.generateStorageIdentifier();
+        S3AccessIO<DataFile> s3io = new S3AccessIO<DataFile>(sid, storageDriverId);
+    }
+
+    // Exercises the constructor that takes an explicit (null) AmazonS3 client; saveInputStream is expected to fail with "ERROR: s3 not initialised"
+    @Test
+    void test3() {
+        DvObject dvObject = new Dataset();
+        dvObject.setProtocol("doi");
+        dvObject.setAuthority("10.5072/FK2");
+        dvObject.setIdentifier("ABC123");
+        DataAccessRequest req = null;
+        AmazonS3 nullAmazonS3 = null;
+        S3AccessIO s3AccessIO = new S3AccessIO<>(dvObject, req, nullAmazonS3, storageDriverId);
+        InputStream inputStream = null;
+        try {
+            s3AccessIO.saveInputStream(inputStream);
+            System.out.println("save complete!");
+        } catch (IOException ex) {
+            System.out.println("saveInputStream exception: " + ex);
+        }
+    }
+
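+    // Variant of test3 using a DataFile and the standard constructor, again calling saveInputStream(null).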
+    @Test
+    void test4() {
+        DvObject dvObject = new DataFile();
+        dvObject.setProtocol("doi");
+        dvObject.setAuthority("10.5072/FK2");
+        dvObject.setIdentifier("ABC123");
+        DataAccessRequest req = null;
+        S3AccessIO s3AccessIO = new S3AccessIO<>(dvObject, req, storageDriverId);
+        InputStream inputStream = null;
+        try {
+            s3AccessIO.saveInputStream(inputStream);
+            System.out.println("save complete!");
+        } catch (IOException ex) {
+            System.out.println("saveInputStream exception: " + ex);
+        }
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
index 83cb0c72786..84a241b90f6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/StorageIOTest.java
@@ -21,8 +21,9 @@
 import java.util.List;
 //import org.apache.commons.httpclient.Header;
 //import org.apache.commons.httpclient.methods.GetMethod;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -242,4 +243,16 @@ public void testGenerateVariableHeader() {
         assertEquals("Random	Random\n", instance.generateVariableHeader(dvs));
         assertEquals(null, instance.generateVariableHeader(null));
     }
+    
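+    // getConfigParamForDriver reads "dataverse.files.<driverId>.<param>"; the three-argument overload
+    // returns the supplied default when that property is not set.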
+    @Test
+    public void testGetConfigParam() {
+        System.setProperty("dataverse.files.globus.type", "globus");
+    assertEquals("globus", StorageIO.getConfigParamForDriver("globus", StorageIO.TYPE));
+    System.clearProperty("dataverse.files.globus.type");
+    }
+    
+    @Test
+    public void testGetConfigParamWithDefault() {
+        assertEquals(DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER, StorageIO.getConfigParamForDriver("globus", AbstractRemoteOverlayAccessIO.BASE_STORE, DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
index c1aa6b5fca3..942e4329384 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataaccess/SwiftAccessIOTest.java
@@ -12,11 +12,10 @@
 import java.security.InvalidKeyException;
 import java.security.NoSuchAlgorithmException;
 import java.security.SignatureException;
-import javax.crypto.Mac;
-import javax.crypto.spec.SecretKeySpec;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Before;
+
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
 
 /**
  *
@@ -34,7 +33,7 @@ public class SwiftAccessIOTest {
     public SwiftAccessIOTest() {
     }
 
-    @Before
+    @BeforeEach
     public void setUpClass() throws IOException {
         datafile = MocksFactory.makeDataFile();
         dataset = MocksFactory.makeDataset();
diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java
index 7f7e734f13e..a37f8b724fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleServiceBeanIT.java
@@ -11,15 +11,15 @@
 import java.util.Calendar;
 import java.util.TimeZone;
 import java.util.logging.Logger;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import javax.json.JsonObject;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import jakarta.json.JsonObject;
 import static java.lang.Thread.sleep;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
 
 /**
  * These tests are not expected to pass unless you have a Data Capture Module
@@ -80,7 +80,7 @@ public void testStartFileSystemImportJob() throws InterruptedException, DataCapt
         dataset.setAuthority("10.5072/FK2");
         dataset.setIdentifier("OSQSB9");
         dataset.setId(728l);
-        String url = "http://localhost:8080/api/batch/jobs/import/datasets/files/:persistentId?persistentId=" + dataset.getGlobalIdString();
+        String url = "http://localhost:8080/api/batch/jobs/import/datasets/files/:persistentId?persistentId=" + dataset.getGlobalId().asString();
         System.out.print("url: " + url);
         String uploadFolder = "OSQSB9";
         String apiToken = "b440cc45-0ce9-4ae6-aabf-72f50fb8b8f2";
diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
index a00daef63c2..eb19f22df63 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
@@ -7,18 +7,18 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeAuthenticatedUser;
 import java.io.UnsupportedEncodingException;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 import org.apache.http.HttpResponseFactory;
 import org.apache.http.HttpStatus;
 import org.apache.http.HttpVersion;
 import org.apache.http.entity.StringEntity;
 import org.apache.http.impl.DefaultHttpResponseFactory;
 import org.apache.http.message.BasicStatusLine;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.Test;
 
 public class DataCaptureModuleUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
index 93eabfbf8af..8eed2a33c5a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataset/DatasetUtilTest.java
@@ -1,7 +1,6 @@
 package edu.harvard.iq.dataverse.dataset;
 
 import edu.harvard.iq.dataverse.DataFile;
-import edu.harvard.iq.dataverse.DataFileCategory;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetField;
 import edu.harvard.iq.dataverse.DatasetFieldType;
@@ -10,11 +9,11 @@
 import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class DatasetUtilTest {
 
@@ -65,6 +64,7 @@ public void testGetThumbnailRestricted() {
         DatasetThumbnail result = DatasetUtil.getThumbnail(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
         assertNull(result);
     }
+
     /**
      * Test of deleteDatasetLogo method, of class DatasetUtil.
      */
@@ -160,4 +160,21 @@ public void testGetDatasetSummaryField_withSelectionWithoutMatches() {
 
         assertEquals(0, DatasetUtil.getDatasetSummaryFields(version, "object").size());
     }
+
+    @Test
+    public void testGetDatasetSummaryFieldNames_emptyCustomFields() {
+        String[] actual = DatasetUtil.getDatasetSummaryFieldNames(null);
+        String[] expected = DatasetUtil.datasetDefaultSummaryFieldNames.split(",");
+
+        assertArrayEquals(expected, actual);
+    }
+
+    @Test
+    public void testGetDatasetSummaryFieldNames_notEmptyCustomFields() {
+        String testCustomFields = "test1,test2";
+        String[] actual = DatasetUtil.getDatasetSummaryFieldNames(testCustomFields);
+        String[] expected = testCustomFields.split(",");
+
+        assertArrayEquals(expected, actual);
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java
index f7fe81b16e3..ced15594f85 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/DuplicateFileCheckerTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.datasetutility;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.mockito.Mockito.mock;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import edu.harvard.iq.dataverse.DataFile;
@@ -19,13 +19,13 @@ public class DuplicateFileCheckerTest {
     private DuplicateFileChecker duplicateFileChecker;
     private DatasetVersionServiceBean datasetVersionServiceBean;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.datasetVersionServiceBean = mock(DatasetVersionServiceBean.class);
         this.duplicateFileChecker = new DuplicateFileChecker(datasetVersionServiceBean);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         duplicateFileChecker = null;
     }
@@ -34,9 +34,9 @@ public void tearDown() {
     // test constructor
     // ----------------------------------------------------------------------------------------------------------
 
-    @Test(expected = NullPointerException.class)
-    public void testConstructorWithUndefinedDatasetVersionService() {
-        DuplicateFileChecker duplicateFileChecker = new DuplicateFileChecker(null);
+    @Test
+    void testConstructorWithUndefinedDatasetVersionService() {
+        assertThrows(NullPointerException.class, () -> new DuplicateFileChecker(null));
     }
 
     @Test
@@ -49,20 +49,20 @@ public void testConstructorWithDefinedDatasetVersionService() {
     // test public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, String checkSum)
     // ----------------------------------------------------------------------------------------------------------
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithCheckSumParamWithUndefinedDatasetVersion() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithCheckSumParamWithUndefinedDatasetVersion() {
         DatasetVersion datasetVersion = null;
         String checkSum = "checkSum";
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum));
     }
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithChecksumParamWithUndefinedChecksum() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithChecksumParamWithUndefinedChecksum() {
         DatasetVersion datasetVersion = new DatasetVersion();
         String checkSum = null;
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, checkSum));
     }
 
     @Test
@@ -81,20 +81,20 @@ public void testIsFileInSavedDatasetVersionWithChecksumParamWithUnsavedFile() {
     // test public boolean isFileInSavedDatasetVersion(DatasetVersion datasetVersion, FileMetadata fileMetadata)
     // ----------------------------------------------------------------------------------------------------------
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedDatasetVersion() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedDatasetVersion() {
         DatasetVersion datasetVersion = null;
         FileMetadata fileMetadata = new FileMetadata();
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata));
     }
 
-    @Test(expected = NullPointerException.class)
-    public void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedFileMetadata() {
+    @Test
+    void testIsFileInSavedDatasetVersionWithFileMetadataParamWithUndefinedFileMetadata() {
         DatasetVersion datasetVersion = new DatasetVersion();
         FileMetadata fileMetadata = null;
-
-        this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata);
+        
+        assertThrows(NullPointerException.class, () -> this.duplicateFileChecker.isFileInSavedDatasetVersion(datasetVersion, fileMetadata));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
index f8c790a566b..c9f251f7e77 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datasetutility/OptionalFileParamsTest.java
@@ -10,19 +10,14 @@
 import edu.harvard.iq.dataverse.DataFileCategory;
 import edu.harvard.iq.dataverse.DataFileTag;
 import edu.harvard.iq.dataverse.FileMetadata;
-import java.util.ArrayList;
+
 import java.util.Arrays;
 import java.util.List;
-import java.util.ResourceBundle;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
-import org.hamcrest.Matchers;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * 
diff --git a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
index 470338d0462..bfb9134cfca 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datavariable/VariableMetadataDDIParserTest.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.datavariable;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import javax.xml.stream.XMLInputFactory;
 import javax.xml.stream.XMLStreamException;
@@ -12,7 +12,7 @@
 import java.util.HashSet;
 import java.util.Collection;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class VariableMetadataDDIParserTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java
index bf679f8fe97..b950d641bf4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/dataverse/DataverseUtilTest.java
@@ -4,15 +4,17 @@
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObjectContainer;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import static org.junit.Assert.assertEquals;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import java.util.HashMap;
 import java.util.Map;
 
-import javax.ws.rs.BadRequestException;
+import jakarta.ws.rs.BadRequestException;
 
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class DataverseUtilTest {
 
@@ -31,9 +33,9 @@ public void testCheckMetadataLanguageCases() {
         mLangSettingMap.put("en", "English");
         mLangSettingMap.put("fr", "French");
         Dataverse undefinedParent = new Dataverse();
-        undefinedParent.setMetadataLanguage(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE);
+        undefinedParent.setMetadataLanguage(DvObjectContainer.UNDEFINED_CODE);
         Dataset undefinedD = new Dataset();
-        undefinedD.setMetadataLanguage(DvObjectContainer.UNDEFINED_METADATA_LANGUAGE_CODE);
+        undefinedD.setMetadataLanguage(DvObjectContainer.UNDEFINED_CODE);
         Dataverse definedParent = new Dataverse();
         definedParent.setMetadataLanguage("en");
         Dataset definedEnglishD = new Dataset();
@@ -42,69 +44,30 @@ public void testCheckMetadataLanguageCases() {
         definedFrenchD.setMetadataLanguage("fr");
         Dataset definedSpanishD = new Dataset();
         definedSpanishD.setMetadataLanguage("es");
+        
         // Not set tests:
         //Good - no mLang sent, parent doesn't have one
-        try {
-            DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, emptyMLangSettingMap);
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, emptyMLangSettingMap));
         //Bad - one sent, parent doesn't have one
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, emptyMLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, emptyMLangSettingMap));
         //Good - one sent, matches parent
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, emptyMLangSettingMap);
-
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, emptyMLangSettingMap));
         //Bad - one sent, doesn't match parent
-        try {
-            DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, emptyMLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, emptyMLangSettingMap));
+        
         //With setting tests
-      //Bad - one sent, parent doesn't have one
-        try {
-            DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        //Bad - one sent, parent doesn't have one
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(undefinedD, undefinedParent, mLangSettingMap));
         //Good - sent, parent undefined, is allowed by setting
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, mLangSettingMap);
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(definedEnglishD, undefinedParent, mLangSettingMap));
         //Bad  one sent, parent undefined, not allowed by setting
-        try {
-            DataverseUtil.checkMetadataLangauge(definedSpanishD, undefinedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedSpanishD, undefinedParent, mLangSettingMap));
         //Bad - one sent, doesn't match parent
-        try {
-            DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(definedFrenchD, definedParent, mLangSettingMap));
         //Bad - undefined sent, parent is defined
-        try {
-            DataverseUtil.checkMetadataLangauge(undefinedD, definedParent, mLangSettingMap);
-            Assert.fail();
-        } catch (BadRequestException e) {
-        }
-      //Good - sent, parent defined, they match
-        try {
-            DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, mLangSettingMap);
-        } catch (BadRequestException e) {
-            Assert.fail();
-        }
+        assertThrows(BadRequestException.class, () -> DataverseUtil.checkMetadataLangauge(undefinedD, definedParent, mLangSettingMap));
+        //Good - sent, parent defined, they match
+        assertDoesNotThrow(() -> DataverseUtil.checkMetadataLangauge(definedEnglishD, definedParent, mLangSettingMap));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java
index ec300a25db7..4b98d3d9850 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/PermissionTest.java
@@ -5,8 +5,9 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.DvObject;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java
index f0ddcf4c81c..a80adb33b8d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestCommandContext.java
@@ -12,6 +12,7 @@
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
 import edu.harvard.iq.dataverse.pidproviders.FakePidProviderServiceBean;
+import edu.harvard.iq.dataverse.pidproviders.PermaLinkPidProviderServiceBean;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import edu.harvard.iq.dataverse.search.IndexBatchServiceBean;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
@@ -19,11 +20,11 @@
 import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
 import edu.harvard.iq.dataverse.search.savedsearch.SavedSearchServiceBean;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.storageuse.StorageUseServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean;
-import java.util.List;
 import java.util.Stack;
-import javax.persistence.EntityManager;
+import jakarta.persistence.EntityManager;
 
 /**
  * A base CommandContext for tests. Provides no-op implementations. Should
@@ -140,6 +141,11 @@ public HandlenetServiceBean handleNet() {
         return null;
     }
 
+    @Override
+    public PermaLinkPidProviderServiceBean permaLinkProvider() {
+        return null;
+    }
+    
     @Override
     public SettingsServiceBean settings() {
         return settings;
@@ -239,6 +245,11 @@ public ConfirmEmailServiceBean confirmEmail() {
     public ActionLogServiceBean actionLog() {
         return null;
     }
+    
+    @Override
+    public StorageUseServiceBean storageUse() {
+        return null;
+    }
 
     @Override
     public void beginCommandSequence() {
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java b/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java
index b1ad74ac2dc..af8b75d5d80 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/TestEntityManager.java
@@ -14,21 +14,21 @@
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.persistence.EntityGraph;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.EntityTransaction;
-import javax.persistence.FlushModeType;
-import javax.persistence.Id;
-import javax.persistence.LockModeType;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaDelete;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.CriteriaUpdate;
-import javax.persistence.metamodel.Metamodel;
+import jakarta.persistence.EntityGraph;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.EntityTransaction;
+import jakarta.persistence.FlushModeType;
+import jakarta.persistence.Id;
+import jakarta.persistence.LockModeType;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.criteria.CriteriaBuilder;
+import jakarta.persistence.criteria.CriteriaDelete;
+import jakarta.persistence.criteria.CriteriaQuery;
+import jakarta.persistence.criteria.CriteriaUpdate;
+import jakarta.persistence.metamodel.Metamodel;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java
index a87de12cfe0..efadd14438a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractDatasetCommandTest.java
@@ -5,10 +5,12 @@
 import edu.harvard.iq.dataverse.engine.command.CommandContext;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -17,16 +19,18 @@
 public class AbstractDatasetCommandTest {
     
    
-    @Test(expected=IllegalArgumentException.class)
-    @SuppressWarnings("ResultOfObjectAllocationIgnored")
-    public void testNullDataset() {
-        new AbstractDatasetCommandImpl(makeRequest(), null);
+    @Test
+    void testNullDataset() {
+        DataverseRequest request = makeRequest();
+        assertThrows(IllegalArgumentException.class, () -> new AbstractDatasetCommandImpl(request, null));
     }
     
-    @Test(expected=IllegalArgumentException.class)
-    @SuppressWarnings("ResultOfObjectAllocationIgnored")
-    public void testNullDatasetNonNullParent() {
-        new AbstractDatasetCommandImpl(makeRequest(), null, makeDataverse());
+    @Test
+    void testNullDatasetNonNullParent() {
+        DataverseRequest request = makeRequest();
+        Dataverse dataverse = makeDataverse();
+        assertThrows(IllegalArgumentException.class,
+            () -> new AbstractDatasetCommandImpl(request, null, dataverse));
     }
     
     /**
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
index dd8901a05dc..a2d9cdfb917 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDatasetVersionCommandTest.java
@@ -16,9 +16,11 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -68,8 +70,8 @@ public void testSimpleVersionAddition() throws Exception {
         assertEquals(expected, testEngine.getReqiredPermissionsForObjects() );
     }
     
-    @Test(expected=IllegalCommandException.class)
-    public void testCantCreateTwoDraftVersions() throws Exception {
+    @Test
+    void testCantCreateTwoDraftVersions() {
         DatasetVersion dsvNew = new DatasetVersion();
         dsvNew.setVersionState(DatasetVersion.VersionState.DRAFT);
         Dataset sampleDataset = makeDataset();
@@ -87,7 +89,7 @@ public DatasetServiceBean datasets() {
             
         });
         
-        testEngine.submit(sut);
+        assertThrows(IllegalCommandException.class, () -> testEngine.submit(sut));
     }
     
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java
index bcbc12d5d4e..7e84cf19e6b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommandTest.java
@@ -15,14 +15,15 @@
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.authorization.users.GuestUser;
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.*;
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -31,9 +32,9 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Future;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -164,7 +165,7 @@ public void deleteFacetsFor(Dataverse d) {
     TestDataverseEngine engine;
     
     
-    @Before
+    @BeforeEach
     public void setUp() {
         indexCalled = false;
         dvStore.clear();
@@ -232,8 +233,8 @@ public void testDefaultOptions() throws CommandException {
         
         // The following is a pretty wierd way to test that the create date defaults to 
         // now, but it works across date changes.
-        assertTrue( "When the supplied creation date is null, date shuld default to command execution time",
-                        Math.abs(System.currentTimeMillis() - result.getCreateDate().toInstant().toEpochMilli()) < 1000 );
+        assertTrue(Math.abs(System.currentTimeMillis() - result.getCreateDate().toInstant().toEpochMilli()) < 1000,
+            "When the supplied creation date is null, date should default to command execution time");
         
         assertTrue( result.isPermissionRoot() );
         assertTrue( result.isThemeRoot() );
@@ -298,31 +299,38 @@ public void testCustomOptions() throws CommandException {
         }
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testCantCreateAdditionalRoot() throws Exception {
-        engine.submit( new CreateDataverseCommand(makeDataverse(), makeRequest(), null, null) );
+    @Test
+    void testCantCreateAdditionalRoot() {
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit( new CreateDataverseCommand(makeDataverse(), makeRequest(), null, null) )
+        );
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testGuestCantCreateDataverse() throws Exception {
+    @Test
+    void testGuestCantCreateDataverse() {
         final DataverseRequest request = new DataverseRequest( GuestUser.get(), IpAddress.valueOf("::") );
         isRootDvExists = false;
-        engine.submit(new CreateDataverseCommand(makeDataverse(), request, null, null) );
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit(new CreateDataverseCommand(makeDataverse(), request, null, null) )
+        );
     }
 
-    @Test( expected=IllegalCommandException.class )
-    public void testCantCreateAnotherWithSameAlias() throws Exception {
+    @Test
+    void testCantCreateAnotherWithSameAlias() {
         
         String alias = "alias";
         final Dataverse dvFirst = makeDataverse();
         dvFirst.setAlias(alias);
         dvFirst.setOwner( makeDataverse() );
-        engine.submit(new CreateDataverseCommand(dvFirst, makeRequest(), null, null) );
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit(new CreateDataverseCommand(dvFirst, makeRequest(), null, null) ));
         
         final Dataverse dv = makeDataverse();
         dv.setOwner( makeDataverse() );
         dv.setAlias(alias);
-        engine.submit(new CreateDataverseCommand(dv, makeRequest(), null, null) );
+        assertThrows(IllegalCommandException.class,
+            () -> engine.submit(new CreateDataverseCommand(dv, makeRequest(), null, null) )
+        );
     }
     
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
index aafad58654e..33f9acd0e1a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
@@ -14,12 +14,13 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class CreatePrivateUrlCommandTest {
 
@@ -31,7 +32,7 @@ public class CreatePrivateUrlCommandTest {
     private final Long versionIsReleased = 4l;
     
     
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
         testEngine = new TestDataverseEngine(new TestCommandContext() {
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
index 243285e69ab..3d947879e56 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommandTest.java
@@ -14,12 +14,15 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
-import static org.junit.Assert.assertTrue;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Matchers;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.ArgumentMatchers;
+
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -50,13 +53,13 @@ public EntityManager em() {
         }
     });
     
-    @Before
+    @BeforeEach
     public void before() {
         saveCalled = false;
     }
     
-    @Test( expected = IllegalCommandException.class )
-    public void testNonSuperUsersCantAddRoles() throws CommandException {
+    @Test
+    void testNonSuperUsersCantAddRoles() {
         DataverseRole dvr = new DataverseRole();
         dvr.setAlias("roleTest");
         dvr.setName("Tester Role");
@@ -69,8 +72,7 @@ public void testNonSuperUsersCantAddRoles() throws CommandException {
         normalUser.setSuperuser(false);
         
         CreateRoleCommand sut = new CreateRoleCommand(dvr, new DataverseRequest(normalUser,IpAddress.valueOf("89.17.33.33")), dv);
-        engine.submit(sut);
-    
+        assertThrows(IllegalCommandException.class, () -> engine.submit(sut));
     }
    
     @Test
@@ -88,12 +90,12 @@ public void testSuperUsersAddRoles() throws CommandException {
         
         CreateRoleCommand sut = new CreateRoleCommand(dvr, new DataverseRequest(normalUser,IpAddress.valueOf("89.17.33.33")), dv);
         engine.submit(sut);
-        assertTrue( "CreateRoleCommand did not call save on the created role.", saveCalled );
+        assertTrue(saveCalled, "CreateRoleCommand did not call save on the created role.");
     
     }
     
-    @Test( expected = IllegalCommandException.class )
-    public void testGuestUsersCantAddRoles() throws CommandException {
+    @Test
+    void testGuestUsersCantAddRoles() {
         DataverseRole dvr = new DataverseRole();
         dvr.setAlias("roleTest");
         dvr.setName("Tester Role");
@@ -103,7 +105,7 @@ public void testGuestUsersCantAddRoles() throws CommandException {
         dvr.setOwner(dv);
         
         CreateRoleCommand sut = new CreateRoleCommand(dvr, new DataverseRequest(GuestUser.get(),IpAddress.valueOf("89.17.33.33")), dv);
-        engine.submit(sut);    
+        assertThrows(IllegalCommandException.class, () -> engine.submit(sut));
     }
     
     private class LocalTestEntityManager extends TestEntityManager {
@@ -128,7 +130,7 @@ public <T> TypedQuery<T> createNamedQuery(String name, Class<T> resultClass) {
             //Mocking a query to return no results when 
             //checking for existing role in DB
             TypedQuery mockedQuery = mock(TypedQuery.class);
-            when(mockedQuery.setParameter(Matchers.anyString(), Matchers.anyObject())).thenReturn(mockedQuery);
+            when(mockedQuery.setParameter(ArgumentMatchers.anyString(), ArgumentMatchers.any())).thenReturn(mockedQuery);
             when(mockedQuery.getSingleResult()).thenReturn(null);
             return mockedQuery;
         }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java
index a0e79268e3d..70e65bfe34c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DRSSubmitToArchiveCommandTest.java
@@ -1,19 +1,20 @@
 package edu.harvard.iq.dataverse.engine.command.impl;
 
 import org.erdtman.jcs.JsonCanonicalizer;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import com.auth0.jwt.JWT;
 import com.auth0.jwt.algorithms.Algorithm;
 import com.auth0.jwt.interfaces.DecodedJWT;
 
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
+
 import java.security.KeyFactory;
 import java.security.interfaces.RSAPrivateKey;
 //import java.security.interfaces.RSAPublicKey;
 import java.security.spec.PKCS8EncodedKeySpec;
 import java.util.Base64;
 
+import static org.junit.jupiter.api.Assertions.fail;
 
 public class DRSSubmitToArchiveCommandTest {
 
@@ -113,7 +114,7 @@ public void createJWT() throws CommandException {
             System.out.println(e.getClass() + e.getLocalizedMessage());
             e.printStackTrace();
             //Any exception is a failure, otherwise decoding worked.
-            Assert.fail(e.getLocalizedMessage());
+            fail(e.getLocalizedMessage());
         }
 
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java
index 74c8c269b4b..0a4e5ed2d7e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/DeletePrivateUrlCommandTest.java
@@ -13,10 +13,11 @@
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class DeletePrivateUrlCommandTest {
 
@@ -25,7 +26,7 @@ public class DeletePrivateUrlCommandTest {
     private final Long noPrivateUrlToDelete = 1l;
     private final Long hasPrivateUrlToDelete = 2l;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testEngine = new TestDataverseEngine(new TestCommandContext() {
             @Override
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java
index 24c48fd257b..2c9f050b92f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommandTest.java
@@ -9,8 +9,9 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import java.util.ArrayList;
 import java.util.List;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeRequest;
 
 /**
@@ -28,9 +29,9 @@ public void testLatestPublishedNoDraft() throws CommandException {
         List<DatasetVersion> versions = make10Versions(ds);
         ds.setVersions(versions);
         
-        assertEquals( 10l, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue() );
-        assertTrue( "Published datasets should require no permissions to view",
-                        engine.getReqiredPermissionsForObjects().get(ds).isEmpty() );
+        assertEquals(10L, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue());
+        assertTrue(engine.getReqiredPermissionsForObjects().get(ds).isEmpty(),
+            "Published datasets should require no permissions to view");
     }
     
     @Test
@@ -41,9 +42,9 @@ public void testLatestPublishedWithDraft() throws CommandException {
         versions.add( MocksFactory.makeDatasetVersion(ds.getCategories()) );
         ds.setVersions(versions);
         
-        assertEquals( 10l, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue() );
-        assertTrue( "Published datasets should require no permissions to view",
-                        engine.getReqiredPermissionsForObjects().get(ds).isEmpty() );
+        assertEquals(10L, engine.submit(new GetLatestPublishedDatasetVersionCommand(makeRequest(), ds)).getVersionNumber().longValue());
+        assertTrue(engine.getReqiredPermissionsForObjects().get(ds).isEmpty(),
+            "Published datasets should require no permissions to view");
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java
index b5019807ac1..47174643a1c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/GetPrivateUrlCommandTest.java
@@ -5,13 +5,12 @@
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
 import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import static org.junit.Assert.assertNull;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class GetPrivateUrlCommandTest {
 
@@ -21,15 +20,15 @@ public class GetPrivateUrlCommandTest {
     public GetPrivateUrlCommandTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testEngine = new TestDataverseEngine(new TestCommandContext() {
 
@@ -48,7 +47,7 @@ public PrivateUrl getPrivateUrlFromDatasetId(long datasetId) {
         });
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java
index 520c91f47ff..0701454113b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ListMetadataBlocksCommandTest.java
@@ -9,8 +9,8 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.Arrays;
@@ -26,7 +26,7 @@ public class ListMetadataBlocksCommandTest {
     private Dataverse dataverse;
     private DataverseMetadataBlockFacet metadataBlockFacet;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         dataverseRequest = Mockito.mock(DataverseRequest.class);
         dataverse = Mockito.mock(Dataverse.class);
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
index 34f03702243..ed6112539ed 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDatasetCommandTest.java
@@ -31,30 +31,30 @@
 import java.util.Date;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.Future;
-import javax.persistence.EntityGraph;
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.EntityTransaction;
-import javax.persistence.FlushModeType;
-import javax.persistence.LockModeType;
-import javax.persistence.Query;
-import javax.persistence.StoredProcedureQuery;
-import javax.persistence.TypedQuery;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaDelete;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.CriteriaUpdate;
-import javax.persistence.metamodel.Metamodel;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.core.Context;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
+
+import jakarta.persistence.EntityGraph;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.EntityManagerFactory;
+import jakarta.persistence.EntityTransaction;
+import jakarta.persistence.FlushModeType;
+import jakarta.persistence.LockModeType;
+import jakarta.persistence.Query;
+import jakarta.persistence.StoredProcedureQuery;
+import jakarta.persistence.TypedQuery;
+import jakarta.persistence.criteria.CriteriaBuilder;
+import jakarta.persistence.criteria.CriteriaDelete;
+import jakarta.persistence.criteria.CriteriaQuery;
+import jakarta.persistence.criteria.CriteriaUpdate;
+import jakarta.persistence.metamodel.Metamodel;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.ws.rs.core.Context;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -71,7 +71,7 @@ public class MoveDatasetCommandTest {
         @Context
         protected HttpServletRequest httpRequest;
 	
-    @Before
+    @BeforeEach
     public void setUp() {
 
         auth = makeAuthenticatedUser("Super", "User");
@@ -190,8 +190,7 @@ public Long findCountResponsesForGivenDataset(Long guestbookId, Long datasetId)
             public IndexServiceBean index(){
                 return new IndexServiceBean(){
                     @Override
-                    public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp){
-                        return null;
+                    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp){
                     }
                 };
             }
@@ -284,13 +283,11 @@ public void testRemoveGuestbook() throws Exception {
 	 * Moving DS to its owning DV 
         * @throws IllegalCommandException
 	 */
-    @Test(expected = IllegalCommandException.class)
-    public void testInvalidMove() throws Exception {
-
+    @Test
+    void testInvalidMove() {
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, root, false));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, false)));
     }
         
     /**
@@ -302,14 +299,13 @@ public void testInvalidMove() throws Exception {
      * Ignoring after permissions change in 47fb045. Did that change make this
      * case untestable? Unclear.
      */
-    @Ignore
-    @Test(expected = PermissionException.class)
-    public void testAuthenticatedUserWithNoRole() throws Exception {
+    @Disabled("Unstable test. Disabled since #5115 by @pdurbin. See commit 7a917177")
+    @Test
+    void testAuthenticatedUserWithNoRole() {
 
         DataverseRequest aRequest = new DataverseRequest(nobody, httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, childA, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childA, null)));
     }
 
     /**
@@ -318,25 +314,23 @@ public void testAuthenticatedUserWithNoRole() throws Exception {
      *
      * @throws java.lang.Exception
      */
-    @Test(expected = PermissionException.class)
-    public void testNotAuthenticatedUser() throws Exception {
+    @Test
+    void testNotAuthenticatedUser() {
 
         DataverseRequest aRequest = new DataverseRequest(GuestUser.get(), httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, root, null));
-        fail();
+        assertThrows(PermissionException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, root, null)));
     }
     
     	/**
 	 * Moving published  DS to unpublished DV
         * @throws IllegalCommandException
 	 */
-    @Test(expected = IllegalCommandException.class)
-    public void testInvalidMovePublishedToUnpublished() throws Exception {
+    @Test
+    void testInvalidMovePublishedToUnpublished() {
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                new MoveDatasetCommand(aRequest, moved, childDraft, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDatasetCommand(aRequest, moved, childDraft, null)));
     }
          
         
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java
index 13b60f875d5..3c3188da830 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/MoveDataverseCommandTest.java
@@ -25,14 +25,14 @@
 import java.util.Date;
 import java.util.List;
 import java.util.concurrent.Future;
-import javax.persistence.EntityManager;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.fail;
-import org.junit.Before;
-import org.junit.Test;
+import jakarta.persistence.EntityManager;
+import jakarta.servlet.http.HttpServletRequest;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -49,7 +49,7 @@ public class MoveDataverseCommandTest {
     AuthenticatedUser auth, nobody;
     protected HttpServletRequest httpRequest;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         // authentication 
         auth = makeAuthenticatedUser("Super", "User");
@@ -221,8 +221,7 @@ public Future<String> indexDataverse(Dataverse dataverse){
                     }
 
                     @Override
-                    public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp){
-                        return null;
+                    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp){
                     }
                     @Override
                     public Future<String> indexDataverseInNewTransaction(Dataverse dataverse){
@@ -230,8 +229,7 @@ public Future<String> indexDataverseInNewTransaction(Dataverse dataverse){
                     }
 
                     @Override
-                    public Future<String> indexDatasetInNewTransaction(Long id){
-                        return null;
+                    public void indexDatasetInNewTransaction(Long id){
                     }                    
                 };
 
@@ -304,43 +302,39 @@ public void testValidMove() throws Exception {
     /**
      * Moving ChildA to its child (illegal).
      */
-    @Test( expected=IllegalCommandException.class )
-    public void testInvalidMove() throws Exception {
+    @Test
+    void testInvalidMove() {
         System.out.println("testInvalidMove");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, childA, grandchildAA, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, childA, grandchildAA, null)));
     }
     
     /**
      * Calling API as a non super user (illegal).
      */
-    @Test(expected = PermissionException.class)
-    public void testNotSuperUser() throws Exception {
+    @Test
+    void testNotSuperUser() {
         System.out.println("testNotSuperUser");
         DataverseRequest aRequest = new DataverseRequest(nobody, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, childB, childA, null));
-        fail();
+        assertThrows(PermissionException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, childB, childA, null)));
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testMoveIntoSelf() throws Exception {
+    @Test
+    void testMoveIntoSelf() {
         System.out.println("testMoveIntoSelf");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, childB, childB, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, childB, childB, null)));
     }
     
-    @Test( expected=IllegalCommandException.class )
-    public void testMoveIntoParent() throws Exception {
+    @Test
+    void testMoveIntoParent() {
         System.out.println("testMoveIntoParent");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildAA, childA, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildAA, childA, null)));
     }
     
     @Test
@@ -357,13 +351,12 @@ public void testKeepGuestbook() throws Exception {
         assertEquals( root, childC.getOwner() );
     }
     
-    @Test(expected = IllegalCommandException.class)
-    public void testRemoveGuestbookWithoutForce() throws Exception {
+    @Test
+    void testRemoveGuestbookWithoutForce() {
         System.out.println("testRemoveGuestbookWithoutForce");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildCC, root, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildCC, root, null)));
     }
     
     @Test
@@ -395,13 +388,12 @@ public void testKeepTemplate() throws Exception {
         
     }
     
-    @Test(expected = IllegalCommandException.class)
-    public void testRemoveTemplateWithoutForce() throws Exception {
+    @Test
+    void testRemoveTemplateWithoutForce() {
         System.out.println("testRemoveTemplateWithoutForce");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildDD, root, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildDD, root, null)));
     }
     
     @Test
@@ -432,13 +424,12 @@ public void testKeepMetadataBlock() throws Exception {
         assertEquals( root, childE.getOwner() );
     }
     
-    @Test(expected = IllegalCommandException.class)
-    public void testRemoveMetadataBlockWithoutForce() throws Exception {
+    @Test
+    void testRemoveMetadataBlockWithoutForce() {
         System.out.println("testRemoveMetadataBlockWithoutForce");
         DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
-        testEngine.submit(
-                        new MoveDataverseCommand(aRequest, grandchildEE, root, null));
-        fail();
+        assertThrows(IllegalCommandException.class,
+            () -> testEngine.submit(new MoveDataverseCommand(aRequest, grandchildEE, root, null)));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java
index 5fdef3ed74c..7609ef17d3e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RequestRsyncScriptCommandTest.java
@@ -9,14 +9,14 @@
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import static edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key.DataCaptureModuleUrl;
-import javax.servlet.http.HttpServletRequest;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import jakarta.servlet.http.HttpServletRequest;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class RequestRsyncScriptCommandTest {
 
@@ -26,15 +26,15 @@ public class RequestRsyncScriptCommandTest {
     public RequestRsyncScriptCommandTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
         testEngine = new TestDataverseEngine(new TestCommandContext() {
 
@@ -66,7 +66,7 @@ public SettingsServiceBean settings() {
         });
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java
index 7b663389a3a..2b1dbc4c64a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/RestrictFileCommandTest.java
@@ -18,13 +18,15 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import java.sql.Timestamp;
 import java.util.Date;
-import org.junit.After;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 
 /**
@@ -44,15 +46,15 @@ public class RestrictFileCommandTest {
     public RestrictFileCommandTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = makeDataset();
         file = makeDataFile();
@@ -74,7 +76,7 @@ public boolean isTrueForKey(SettingsServiceBean.Key key, boolean defaultValue) {
             
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
     }
         
@@ -247,7 +249,7 @@ public void testUnrestrictUnrestrictedNewFile() throws Exception {
         
     }
 
-    @Test 
+    @Test
     public void testPublicInstall() throws CommandException {
         file.setOwner(dataset);
         String expected = "Restricting files is not permitted on a public installation.";
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java
index 8b5556b70c9..23cc4547bc4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/ReturnDatasetToAuthorCommandTest.java
@@ -24,14 +24,14 @@
 import edu.harvard.iq.dataverse.workflows.WorkflowComment;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Future;
-import javax.persistence.EntityManager;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import org.junit.Before;
-import org.junit.Test;
+
+import jakarta.persistence.EntityManager;
+import jakarta.servlet.http.HttpServletRequest;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class ReturnDatasetToAuthorCommandTest {
 
@@ -39,7 +39,7 @@ public class ReturnDatasetToAuthorCommandTest {
     private DataverseRequest dataverseRequest;
     private TestDataverseEngine testEngine;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
 
@@ -61,8 +61,7 @@ public AuthenticatedUser getAuthenticatedUser(String id) {
             public IndexServiceBean index() {
                 return new IndexServiceBean() {
                     @Override
-                    public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
-                        return null;
+                    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
                     }
                 };
             }
@@ -142,9 +141,10 @@ public List<AuthenticatedUser> getUsersWithPermissionOn(Permission permission, D
             throw new IllegalCommandException("You must enter a reason for returning a dataset to its author.", this);
         }
      */
-    @Test(expected=IllegalArgumentException.class)
-    public void testDatasetNull() throws CommandException {
-        new ReturnDatasetToAuthorCommand(dataverseRequest, null, "");
+    @Test
+    void testDatasetNull() {
+        assertThrows(IllegalArgumentException.class,
+            () -> new ReturnDatasetToAuthorCommand(dataverseRequest, null, ""));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java
index 1098b10a041..700ba332247 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/SubmitDatasetForReviewCommandTest.java
@@ -24,13 +24,13 @@
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import java.util.Collections;
 import java.util.List;
-import java.util.concurrent.Future;
-import javax.persistence.EntityManager;
-import javax.servlet.http.HttpServletRequest;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import org.junit.Before;
-import org.junit.Test;
+
+import jakarta.persistence.EntityManager;
+import jakarta.servlet.http.HttpServletRequest;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class SubmitDatasetForReviewCommandTest {
 
@@ -38,7 +38,7 @@ public class SubmitDatasetForReviewCommandTest {
     private DataverseRequest dataverseRequest;
     private TestDataverseEngine testEngine;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
 
@@ -60,8 +60,7 @@ public AuthenticatedUser getAuthenticatedUser(String id) {
             public IndexServiceBean index() {
                 return new IndexServiceBean() {
                     @Override
-                    public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
-                        return null;
+                    public void asyncIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) {
                     }
                 };
             }
@@ -137,9 +136,10 @@ public List<AuthenticatedUser> getUsersWithPermissionOn(Permission permission, D
         );
     }
 
-    @Test( expected=IllegalArgumentException.class )
-    public void testDatasetNull() {
-        new SubmitDatasetForReviewCommand(dataverseRequest, null);
+    @Test
+    void testDatasetNull() {
+        assertThrows(IllegalArgumentException.class,
+            () -> new SubmitDatasetForReviewCommand(dataverseRequest, null));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java
index f55e9a2d085..34ea7810574 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDatasetThumbnailCommandTest.java
@@ -9,12 +9,13 @@
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class UpdateDatasetThumbnailCommandTest {
 
@@ -26,15 +27,15 @@ public class UpdateDatasetThumbnailCommandTest {
     public UpdateDatasetThumbnailCommandTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
         dataset = new Dataset();
         testEngine = new TestDataverseEngine(new TestCommandContext() {
@@ -86,7 +87,7 @@ public String getDataverseSiteUrl() {
         );
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java
index 711e7881af5..4f6a1a1f678 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetRootCommandTest.java
@@ -8,8 +8,8 @@
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
 
@@ -26,7 +26,7 @@ public class UpdateMetadataBlockFacetRootCommandTest {
     private DataverseRequest dataverseRequest;
     private Dataverse dataverse;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         dataverseRequest = Mockito.mock(DataverseRequest.class);
         dataverse = Mockito.mock(Dataverse.class);
@@ -43,7 +43,7 @@ public void should_not_update_dataverse_when_root_value_does_not_change() throws
 
         Mockito.verify(dataverse).isMetadataBlockFacetRoot();
         Mockito.verifyNoMoreInteractions(dataverse);
-        Mockito.verifyZeroInteractions(context.dataverses());
+        Mockito.verifyNoInteractions(context.dataverses());
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java
index 2d64de80f3d..51892f20df3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateMetadataBlockFacetsCommandTest.java
@@ -9,8 +9,9 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.Arrays;
@@ -26,20 +27,20 @@ public class UpdateMetadataBlockFacetsCommandTest {
     private DataverseRequest dataverseRequest;
     private Dataverse dataverse;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         dataverseRequest = Mockito.mock(DataverseRequest.class);
         dataverse = Mockito.mock(Dataverse.class);
     }
 
-    @Test(expected = IllegalCommandException.class)
-    public void should_throw_IllegalCommandException_when_dataverse_is_not_metadata_facet_root() throws CommandException {
+    @Test
+    void should_throw_IllegalCommandException_when_dataverse_is_not_metadata_facet_root() {
         Mockito.when(dataverse.isMetadataBlockFacetRoot()).thenReturn(false);
 
         UpdateMetadataBlockFacetsCommand target = new UpdateMetadataBlockFacetsCommand(dataverseRequest, dataverse, Collections.emptyList());
 
         CommandContext context = Mockito.mock(CommandContext.class, Mockito.RETURNS_DEEP_STUBS);
-        target.execute(context);
+        Assertions.assertThrows(IllegalCommandException.class, () -> target.execute(context));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java
index 3dced0aaf05..1a46a8803a6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/UpdatePermissionRootCommandTest.java
@@ -7,10 +7,11 @@
 import edu.harvard.iq.dataverse.engine.TestCommandContext;
 import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -22,7 +23,7 @@ public class UpdatePermissionRootCommandTest {
     TestCommandContext testCommandContext;
     boolean serviceBeanCalled;
     
-    @Before
+    @BeforeEach
     public void setUp() {
         mockBean = new DataverseServiceBean() {
             @Override
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/CleanupTest.java b/src/test/java/edu/harvard/iq/dataverse/export/CleanupTest.java
deleted file mode 100644
index 88dd3d12ffb..00000000000
--- a/src/test/java/edu/harvard/iq/dataverse/export/CleanupTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package edu.harvard.iq.dataverse.export;
-
-import edu.harvard.iq.dataverse.export.openaire.Cleanup;
-import org.junit.Test;
-import static org.junit.Assert.*;
-
-/**
- *
- * @author francesco.cadili@4science.it
- */
-public class CleanupTest {
-    
-    /**
-     * full name or organization name cleanup.
-     *
-     * Name is composed of:
-     * <First Names> <Family Name>
-     */
-    @Test
-    public void testNormalize() {
-        assertEquals(Cleanup.normalize("    Francesco    "), "Francesco");
-        assertEquals(Cleanup.normalize("Francesco  Cadili "), "Francesco Cadili");
-        assertEquals(Cleanup.normalize("  Cadili,Francesco"), "Cadili, Francesco");
-        assertEquals(Cleanup.normalize("Cadili,     Francesco  "), "Cadili, Francesco");
-        assertEquals(Cleanup.normalize(null), "");
-        
-        // TODO: organization examples...
-    }
-}
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java
index c9446d7c414..0eb231dd866 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/DDIExporterTest.java
@@ -10,12 +10,13 @@
 import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
 import edu.harvard.iq.dataverse.branding.BrandingUtilTest;
 import edu.harvard.iq.dataverse.export.ddi.DdiExportUtil;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.ExportException;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc;
-import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.json.JsonParseException;
-import edu.harvard.iq.dataverse.util.json.JsonParser;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -34,9 +35,8 @@
 import java.util.HashSet;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import org.junit.jupiter.api.BeforeAll;
@@ -44,6 +44,7 @@
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 import org.xmlunit.assertj3.XmlAssert;
+import org.xmlunit.builder.Input;
 
 public class DDIExporterTest {
 
@@ -79,16 +80,21 @@ public void testExportDataset() throws JsonParseException, IOException, ExportEx
         String datasetDtoJsonString = Files.readString(Path.of("src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json"), StandardCharsets.UTF_8);
         
         JsonObject datasetDtoJson = Json.createReader(new StringReader(datasetDtoJsonString)).readObject();
-        DatasetVersion datasetVersion = gson.fromJson(datasetDtoJson.getJsonObject("datasetVersion").toString(), DatasetVersion.class);
+        
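+        // Stub the ExportDataProvider SPI with the dataset JSON and an empty file-details array, which is what the exporter now consumes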
+        ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+        Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(datasetDtoJson);
+        Mockito.when(exportDataProviderStub.getDatasetFileDetails()).thenReturn(Json.createArrayBuilder().build());
+        
         
         //when
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-        new DDIExporter().exportDataset(datasetVersion, datasetDtoJson, byteArrayOutputStream);
+        new DDIExporter().exportDataset(exportDataProviderStub, byteArrayOutputStream);
 
         // then
         String xml = XmlPrinter.prettyPrintXml(byteArrayOutputStream.toString(StandardCharsets.UTF_8));
-        XmlAssert.assertThat(xml).isInvalid();
-        logger.severe("DDIExporterTest.testExportDataset() creates XML but it's invalid. Fixing in DDIExportUtil required.");
+        logger.fine(xml);
+        XmlAssert.assertThat(xml).isValidAgainst(Input.fromPath(Path.of("src/test/resources/xml/xsd/ddi-codebook-2.5/ddi_codebook_2_5.xsd")).build());
+        logger.severe("DDIExporterTest.testExportDataset() creates XML that should now be valid, since DDIExportUtil has been fixed.");
     }
 
     @Test
@@ -96,13 +102,15 @@ public void testExportDatasetContactEmailPresent() throws Exception {
         File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/datasetContactEmailPresent.json");
         String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
 
-        JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
-        JsonObject json = jsonReader.readObject();
-        JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService);
-        DatasetVersion version = jsonParser.parseDatasetVersion(json.getJsonObject("datasetVersion"));
+        JsonObject json = JsonUtil.getJsonObject(datasetVersionAsJson);
+        
+        ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+        Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(json);
+        Mockito.when(exportDataProviderStub.getDatasetFileDetails()).thenReturn(Json.createArrayBuilder().build());
+        
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
         DDIExporter instance = new DDIExporter();
-        instance.exportDataset(version, json, byteArrayOutputStream);
+        instance.exportDataset(exportDataProviderStub, byteArrayOutputStream);
 
         logger.fine(XmlPrinter.prettyPrintXml(byteArrayOutputStream.toString()));
         assertTrue(byteArrayOutputStream.toString().contains("finch@mailinator.com"));
@@ -114,13 +122,15 @@ public void testExportDatasetContactEmailAbsent() throws Exception {
         File datasetVersionJson = new File("src/test/java/edu/harvard/iq/dataverse/export/ddi/datasetContactEmailAbsent.json");
         String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
 
-        JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
-        JsonObject json = jsonReader.readObject();
-        JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService);
-        DatasetVersion version = jsonParser.parseDatasetVersion(json.getJsonObject("datasetVersion"));
+        JsonObject json = JsonUtil.getJsonObject(datasetVersionAsJson);
+        
+        ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+        Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(json);
+        Mockito.when(exportDataProviderStub.getDatasetFileDetails()).thenReturn(Json.createArrayBuilder().build());
+        
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
         DDIExporter instance = new DDIExporter();
-        instance.exportDataset(version, json, byteArrayOutputStream);
+        instance.exportDataset(exportDataProviderStub, byteArrayOutputStream);
 
         logger.fine(XmlPrinter.prettyPrintXml(byteArrayOutputStream.toString()));
         assertFalse(byteArrayOutputStream.toString().contains("finch@mailinator.com"));
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java
index 7f7cc203506..76ca853d5cc 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExportUtilTest.java
@@ -6,7 +6,10 @@
 package edu.harvard.iq.dataverse.export;
 
 import com.google.gson.Gson;
+
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.HandlenetServiceBean;
 import edu.harvard.iq.dataverse.api.dto.DatasetDTO;
 import edu.harvard.iq.dataverse.api.dto.DatasetVersionDTO;
 import edu.harvard.iq.dataverse.export.openaire.OpenAireExportUtil;
@@ -53,10 +56,10 @@ public void testWriteIdentifierElementDoi() throws XMLStreamException {
         String persistentAgency = "doi";
         String persistentAuthority = "10.123";
         String persistentId = "123";
-        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId);
+        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId, null, DOIServiceBean.DOI_RESOLVER_URL, null);
 
         // when
-        OpenAireExportUtil.writeIdentifierElement(xmlWriter, globalId.toURL().toString(), null);
+        OpenAireExportUtil.writeIdentifierElement(xmlWriter, globalId.asURL(), null);
         xmlWriter.flush();
 
         // then
@@ -73,10 +76,10 @@ public void testWriteIdentifierElementHandle() throws XMLStreamException {
         String persistentAgency = "hdl";
         String persistentAuthority = "1902.1";
         String persistentId = "111012";
-        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId);
+        GlobalId globalId = new GlobalId(persistentAgency, persistentAuthority, persistentId, null, HandlenetServiceBean.HDL_RESOLVER_URL, null);
 
         // when
-        OpenAireExportUtil.writeIdentifierElement(xmlWriter, globalId.toURL().toString(), null);
+        OpenAireExportUtil.writeIdentifierElement(xmlWriter, globalId.asURL(), null);
         xmlWriter.flush();
 
         // then
@@ -457,23 +460,29 @@ public void testWriteContributorsElementComplete() throws XMLStreamException, IO
                 + "<affiliation>ContactAffiliation3</affiliation>"
                 + "</contributor>"
                 + "<contributor contributorType=\"Producer\">"
-                + "<contributorName>LastProducer1, FirstProducer1</contributorName>"
+                + "<contributorName nameType=\"Personal\">LastProducer1, FirstProducer1</contributorName>"
+                + "<givenName>FirstProducer1</givenName><familyName>LastProducer1</familyName>"
                 + "<affiliation>ProducerAffiliation1</affiliation>"
                 + "</contributor><contributor contributorType=\"Producer\">"
-                + "<contributorName>LastProducer2, FirstProducer2</contributorName>"
+                + "<contributorName nameType=\"Personal\">LastProducer2, FirstProducer2</contributorName>"
+                + "<givenName>FirstProducer2</givenName><familyName>LastProducer2</familyName>"
                 + "<affiliation>ProducerAffiliation2</affiliation>"
                 + "</contributor>"
                 + "<contributor contributorType=\"DataCollector\">"
-                + "<contributorName>LastContributor1, FirstContributor1</contributorName>"
+                + "<contributorName nameType=\"Personal\">LastContributor1, FirstContributor1</contributorName>"
+                + "<givenName>FirstContributor1</givenName><familyName>LastContributor1</familyName>"
                 + "</contributor>"
                 + "<contributor contributorType=\"DataCurator\">"
-                + "<contributorName>LastContributor2, FirstContributor2</contributorName>"
+                + "<contributorName nameType=\"Personal\">LastContributor2, FirstContributor2</contributorName>"
+                + "<givenName>FirstContributor2</givenName><familyName>LastContributor2</familyName>"
                 + "</contributor><contributor contributorType=\"Distributor\">"
-                + "<contributorName>LastDistributor1, FirstDistributor1</contributorName>"
+                + "<contributorName nameType=\"Personal\">LastDistributor1, FirstDistributor1</contributorName>"
+                + "<givenName>FirstDistributor1</givenName><familyName>LastDistributor1</familyName>"
                 + "<affiliation>DistributorAffiliation1</affiliation>"
                 + "</contributor>"
                 + "<contributor contributorType=\"Distributor\">"
-                + "<contributorName>LastDistributor2, FirstDistributor2</contributorName>"
+                + "<contributorName nameType=\"Personal\">LastDistributor2, FirstDistributor2</contributorName>"
+                + "<givenName>FirstDistributor2</givenName><familyName>LastDistributor2</familyName>"
                 + "<affiliation>DistributorAffiliation2</affiliation>"
                 + "</contributor>"
                 + "</contributors>",
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java
index 505496fca7a..2d06436fb33 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/OpenAireExporterTest.java
@@ -1,8 +1,12 @@
 package edu.harvard.iq.dataverse.export;
 
-import com.jayway.restassured.path.xml.XmlPath;
-import edu.harvard.iq.dataverse.DatasetVersion;
+import io.restassured.path.xml.XmlPath;
 import edu.harvard.iq.dataverse.util.xml.XmlPrinter;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.XMLExporter;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -10,13 +14,13 @@
 import java.io.StringReader;
 import java.nio.file.Files;
 import java.nio.file.Paths;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
-import static junit.framework.Assert.assertEquals;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
 import org.xml.sax.ErrorHandler;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
@@ -29,6 +33,7 @@ public class OpenAireExporterTest {
     public OpenAireExporterTest() {
         openAireExporter = new OpenAireExporter();
     }
 
     /**
      * Test of getProviderName method, of class OpenAireExporter.
@@ -38,7 +43,7 @@ public void testGetProviderName() {
         System.out.println("getProviderName");
         OpenAireExporter instance = new OpenAireExporter();
         String expResult = "oai_datacite";
-        String result = instance.getProviderName();
+        String result = instance.getFormatName();
         assertEquals(expResult, result);
     }
 
@@ -50,7 +55,7 @@ public void testGetDisplayName() {
         System.out.println("getDisplayName");
         OpenAireExporter instance = new OpenAireExporter();
         String expResult = "OpenAIRE";
-        String result = instance.getDisplayName();
+        String result = instance.getDisplayName(null);
         assertEquals(expResult, result);
     }
 
@@ -64,9 +69,12 @@ public void testExportDataset() throws Exception {
         String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
         JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
         JsonObject jsonObject = jsonReader.readObject();
-        DatasetVersion nullVersion = null;
+        
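+        // exportDataset() now takes an ExportDataProvider rather than a DatasetVersion, so mock one that supplies the dataset JSON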
+        ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+        Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(jsonObject);
+        
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-        openAireExporter.exportDataset(nullVersion, jsonObject, byteArrayOutputStream);
+        openAireExporter.exportDataset(exportDataProviderStub, byteArrayOutputStream);
         String xmlOnOneLine = new String(byteArrayOutputStream.toByteArray());
         String xmlAsString = XmlPrinter.prettyPrintXml(xmlOnOneLine);
         System.out.println("XML: " + xmlAsString);
@@ -86,9 +94,12 @@ public void testValidateExportDataset() throws Exception {
         String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
         JsonReader jsonReader = Json.createReader(new StringReader(datasetVersionAsJson));
         JsonObject jsonObject = jsonReader.readObject();
-        DatasetVersion nullVersion = null;
+        
+        ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+        Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(jsonObject);
+        
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-        openAireExporter.exportDataset(nullVersion, jsonObject, byteArrayOutputStream);
+        openAireExporter.exportDataset(exportDataProviderStub, byteArrayOutputStream);
 
         {
             String xmlOnOneLine = new String(byteArrayOutputStream.toByteArray());
@@ -120,14 +131,14 @@ public void fatalError(SAXParseException e) throws SAXException {
     }
 
     /**
-     * Test of isXMLFormat method, of class OpenAireExporter.
+     * Test that OpenAireExporter is an XMLExporter.
      */
     @Test
     public void testIsXMLFormat() {
         System.out.println("isXMLFormat");
         OpenAireExporter instance = new OpenAireExporter();
         Boolean expResult = true;
-        Boolean result = instance.isXMLFormat();
+        Boolean result = instance instanceof XMLExporter;
         assertEquals(expResult, result);
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
index e660cf78da2..d600ccac53c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/SchemaDotOrgExporterTest.java
@@ -2,6 +2,8 @@
 
 import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.branding.BrandingUtilTest;
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.XMLExporter;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc;
@@ -30,20 +32,21 @@
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Logger;
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * For docs see {@link SchemaDotOrgExporter}.
  */
+@LocalJvmSettings
 public class SchemaDotOrgExporterTest {
 
     private static final Logger logger = Logger.getLogger(SchemaDotOrgExporterTest.class.getCanonicalName());
@@ -77,7 +80,10 @@ public void testExportDataset() throws JsonParseException, ParseException, IOExc
         String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
 
         JsonObject json = JsonUtil.getJsonObject(datasetVersionAsJson);
-        JsonObject json2 = createExportFromJson(json);
+        ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+        Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(json);
+        
+        JsonObject json2 = createExportFromJson(exportDataProviderStub);
         
         assertEquals("http://schema.org", json2.getString("@context"));
         assertEquals("Dataset", json2.getString("@type"));
@@ -156,16 +162,19 @@ public void testExportDescriptionTruncation() throws JsonParseException, ParseEx
     String datasetVersionAsJson = new String(Files.readAllBytes(Paths.get(datasetVersionJson.getAbsolutePath())));
 
     JsonObject json = JsonUtil.getJsonObject(datasetVersionAsJson);
-    JsonObject json2 = createExportFromJson(json);
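+    // Same stub pattern as above: the provider supplies the dataset JSON consumed by the exporter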
+    ExportDataProvider exportDataProviderStub = Mockito.mock(ExportDataProvider.class);
+    Mockito.when(exportDataProviderStub.getDatasetJson()).thenReturn(json);
+    
+    JsonObject json2 = createExportFromJson(exportDataProviderStub);
 
     assertTrue(json2.getString("description").endsWith("at..."));
     }
     
-    private JsonObject createExportFromJson(JsonObject json) throws JsonParseException, ParseException {
+    private JsonObject createExportFromJson(ExportDataProvider provider) throws JsonParseException, ParseException {
         License license = new License("CC0 1.0", "You can copy, modify, distribute and perform the work, even for commercial purposes, all without asking permission.", URI.create("http://creativecommons.org/publicdomain/zero/1.0/"), URI.create("/resources/images/cc0.png"), true, 1l);
         license.setDefault(true);
         JsonParser jsonParser = new JsonParser(datasetFieldTypeSvc, null, settingsService, licenseService);
-        DatasetVersion version = jsonParser.parseDatasetVersion(json.getJsonObject("datasetVersion"));
+        DatasetVersion version = jsonParser.parseDatasetVersion(provider.getDatasetJson().getJsonObject("datasetVersion"));
         version.setVersionState(DatasetVersion.VersionState.RELEASED);
         SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
         Date publicationDate = dateFmt.parse("19551105");
@@ -211,7 +220,8 @@ private JsonObject createExportFromJson(JsonObject json) throws JsonParseExcepti
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
         if(schemaDotOrgExporter == null) logger.fine("sdoe" + " null");
         try {
-        schemaDotOrgExporter.exportDataset(version, json, byteArrayOutputStream);
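+            // Wrap the parsed DatasetVersion in the internal SPI implementation expected by exportDataset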
+            ExportDataProvider provider2 = new InternalExportDataProvider(version);
+            schemaDotOrgExporter.exportDataset(provider2, byteArrayOutputStream);
         } catch (Exception e) {
             e.printStackTrace();
         }
@@ -224,7 +234,7 @@ private JsonObject createExportFromJson(JsonObject json) throws JsonParseExcepti
      */
     @Test
     public void testGetProviderName() {
-        assertEquals("schema.org", schemaDotOrgExporter.getProviderName());
+        assertEquals("schema.org", schemaDotOrgExporter.getFormatName());
     }
 
     /**
@@ -233,15 +243,15 @@ public void testGetProviderName() {
     @Test
     public void testGetDisplayName() {
         // We capitalize "Schema.org" because it looks better in the dropdown list and it's what DataCite does in their UI.
-        assertEquals("Schema.org JSON-LD", schemaDotOrgExporter.getDisplayName());
+        assertEquals("Schema.org JSON-LD", schemaDotOrgExporter.getDisplayName(null));
     }
 
     /**
-     * Test of isXMLFormat method, of class SchemaDotOrgExporter.
+     * Test that SchemaDotOrgExporter is not an XMLExporter
      */
     @Test
     public void testIsXMLFormat() {
-        assertEquals(false, schemaDotOrgExporter.isXMLFormat());
+        assertEquals(false, schemaDotOrgExporter instanceof XMLExporter);
     }
 
     /**
@@ -261,57 +271,13 @@ public void testIsAvailableToUsers() {
     }
 
     /**
-     * Test of getXMLNameSpace method, of class SchemaDotOrgExporter.
-     */
-    @Test
-    public void testGetXMLNameSpace() {
-        ExportException expectedException = null;
-        try {
-            String result = schemaDotOrgExporter.getXMLNameSpace();
-        } catch (ExportException ex) {
-            expectedException = ex;
-        }
-        assertEquals(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.", expectedException.getMessage());
-    }
-
-    /**
-     * Test of getXMLSchemaLocation method, of class SchemaDotOrgExporter.
+     * Test that SchemaDotOrgExporter does not implement the XMLExporter interface.
      */
     @Test
-    public void testGetXMLSchemaLocation() {
-        ExportException expectedException = null;
-        try {
-            String result = schemaDotOrgExporter.getXMLSchemaLocation();
-        } catch (ExportException ex) {
-            expectedException = ex;
-        }
-        assertEquals(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.", expectedException.getMessage());
+    public void testNotAnXMLExporter() {
+        assertFalse(schemaDotOrgExporter instanceof XMLExporter);
     }
 
-    /**
-     * Test of getXMLSchemaVersion method, of class SchemaDotOrgExporter.
-     */
-    @Test
-    public void testGetXMLSchemaVersion() {
-        ExportException expectedException = null;
-        try {
-            String result = schemaDotOrgExporter.getXMLSchemaVersion();
-        } catch (ExportException ex) {
-            expectedException = ex;
-        }
-        assertEquals(SchemaDotOrgExporter.class.getSimpleName() + ": not an XML format.", expectedException.getMessage());
-    }
-
-    /**
-     * Test of setParam method, of class SchemaDotOrgExporter.
-     */
-    @Test
-    public void testSetParam() {
-        String name = "";
-        Object value = null;
-        schemaDotOrgExporter.setParam(name, value);
-    }
-    
     private static void mockDatasetFieldSvc() {
         datasetFieldTypeSvc.setMetadataBlock("citation");
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt b/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt
index a3f0dffc767..c9e429729df 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt
+++ b/src/test/java/edu/harvard/iq/dataverse/export/dataset-all-defaults.txt
@@ -42,9 +42,9 @@
           },
           {
             "typeName": "alternativeTitle",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "primitive",
-            "value": "Alternative Title"
+            "value": ["Alternative Title"]
           },
           {
             "typeName": "alternativeURL",
@@ -694,9 +694,10 @@
           },
           {
             "typeName": "series",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "compound",
-            "value": {
+            "value": [
+              {
               "seriesName": {
                 "typeName": "seriesName",
                 "multiple": false,
@@ -710,6 +711,7 @@
                 "value": "SeriesInformation"
               }
             }
+            ]
           },
           {
             "typeName": "software",
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java
index 0ac21ffc921..41e6be61bb8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/DdiExportUtilTest.java
@@ -46,6 +46,7 @@ void setup() {
         DdiExportUtil.injectSettingsService(settingsSvc);
     }
     
+    
     @Test
     public void testJson2DdiNoFiles() throws Exception {
         // given
@@ -74,7 +75,7 @@ public void testExportDDI() throws Exception {
         
         // when
         String result = DdiExportUtil.datasetDtoAsJson2ddi(datasetVersionAsJson);
-        logger.fine(result);
+        logger.fine(XmlPrinter.prettyPrintXml(result));
         
         // then
         XmlAssert.assertThat(result).and(datasetAsDdi).ignoreWhitespace().areSimilar();
@@ -94,7 +95,7 @@ public void testJson2ddiHasFiles() throws Exception {
         String datasetAsDdi = XmlPrinter.prettyPrintXml(Files.readString(ddiFile, StandardCharsets.UTF_8));
         logger.fine(datasetAsDdi);
         String result = DdiExportUtil.datasetDtoAsJson2ddi(datasetVersionAsJson);
-        logger.fine(result);
+        logger.fine(XmlPrinter.prettyPrintXml(result));
         boolean filesMinimallySupported = false;
         // TODO: 
         // setting "filesMinimallySupported" to false here, thus disabling the test;
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json
index 1b327c15496..bdff949bb36 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json
+++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-create-new-all-ddi-fields.json
@@ -51,9 +51,9 @@
           },
           {
             "typeName": "alternativeTitle",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "primitive",
-            "value": "Alternative Title"
+            "value": ["Alternative Title1", "Alternative Title2"]
           },
           {
             "typeName": "otherId",
@@ -409,9 +409,9 @@
           },
           {
             "typeName": "productionPlace",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "primitive",
-            "value": "ProductionPlace"
+            "value": ["ProductionPlace"]
           },
           {
             "typeName": "contributor",
@@ -653,22 +653,38 @@
           },
           {
             "typeName": "series",
-            "multiple": false,
+            "multiple": true,
             "typeClass": "compound",
-            "value": {
-              "seriesName": {
-                "typeName": "seriesName",
-                "multiple": false,
-                "typeClass": "primitive",
-                "value": "SeriesName"
+            "value": [ 
+              {
+		"seriesName": {
+                  "typeName": "seriesName",
+                  "multiple": false,
+                  "typeClass": "primitive",
+                  "value": "SeriesName1"
+                },
+                "seriesInformation": {
+                  "typeName": "seriesInformation",
+                  "multiple": false,
+                  "typeClass": "primitive",
+                  "value": "SeriesInformation1"
+                }
               },
-              "seriesInformation": {
-                "typeName": "seriesInformation",
-                "multiple": false,
-                "typeClass": "primitive",
-                "value": "SeriesInformation"
-              }
-            }
+              {
+                "seriesName": {
+                  "typeName": "seriesName",
+                  "multiple": false,
+                  "typeClass": "primitive",
+                  "value": "SeriesName2"
+                },
+                "seriesInformation": {
+                  "typeName": "seriesInformation",
+                  "multiple": false,
+                  "typeClass": "primitive",
+                  "value": "SeriesInformation2"
+                }
+              }
+            ]
           },
           {
             "typeName": "software",
@@ -842,25 +858,25 @@
                   "typeName": "westLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "10"
+                  "value": "-72"
                 },
                 "eastLongitude": {
                   "typeName": "eastLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "20"
+                  "value": "-70"
                 },
                 "northLongitude": {
                   "typeName": "northLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "30"
+                  "value": "43"
                 },
                 "southLongitude": {
                   "typeName": "southLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "40"
+                  "value": "42"
                 }
               },
               {
@@ -868,25 +884,25 @@
                   "typeName": "westLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "50"
+                  "value": "-18"
                 },
                 "eastLongitude": {
                   "typeName": "eastLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "60"
+                  "value": "-13"
                 },
                 "northLongitude": {
                   "typeName": "northLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "70"
+                  "value": "29"
                 },
                 "southLongitude": {
                   "typeName": "southLongitude",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "80"
+                  "value": "28"
                 }
               }
             ]
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json
index 7845f77d33f..9bdc7e45349 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json
+++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.json
@@ -29,6 +29,12 @@
             "typeClass": "primitive",
             "value": "Darwin's Finches"
           },
+          {
+              "typeName": "alternativeTitle",
+              "multiple": true,
+              "typeClass": "primitive",
+              "value": ["Darwin's Finches Alternative Title1", "Darwin's Finches Alternative Title2"]
+          },
           {
             "typeName": "author",
             "multiple": true,
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml
index 6cce35ec150..6730c44603a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml
+++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-finch1.xml
@@ -17,20 +17,22 @@
     <citation>
       <titlStmt>
         <titl xml:lang="en">Darwin's Finches</titl>
+        <altTitl>Darwin's Finches Alternative Title1</altTitl>
+        <altTitl>Darwin's Finches Alternative Title2</altTitl>
         <IDNo agency="DOI">doi:10.5072/FK2/PCA2E3</IDNo>
       </titlStmt>
       <rspStmt>
         <AuthEnty affiliation="Birds Inc.">Finch, Fiona</AuthEnty>
       </rspStmt>
       <prodStmt>
-        <producer affiliation="Hawk Institute" abbr="ProdAbb" role="http://www.hawk.edu/logo" URI="http://www.hawk.edu/url">Johnny Hawk</producer>
+        <producer affiliation="Hawk Institute" abbr="ProdAbb">Johnny Hawk</producer>
       </prodStmt>
       <distStmt>
-        <distrbtr xml:lang="en" affiliation="Valhalla Polytechnic" abbr="Dist-Abb" URI="http://www.valhalla.edu/url" role="http://www.valhalla.edu/logo">Odin Raven</distrbtr>
+        <distrbtr xml:lang="en" affiliation="Valhalla Polytechnic" abbr="Dist-Abb" URI="http://www.valhalla.edu/url">Odin Raven</distrbtr>
         <contact affiliation="Finch Academy" email="finch@mailinator.com">Jimmy Finch</contact>
         <depositr>Added, Depositor</depositr>
       </distStmt>
-      <holdings URI="https://doi.org/10.5072/FK2/PCA2E3"></holdings>
+      <holdings URI="https://doi.org/10.5072/FK2/PCA2E3"/>
     </citation>
     <stdyInfo>
       <subject>
@@ -45,17 +47,17 @@
         <timePrd cycle="P1" event="end" date="20160630">20160630</timePrd>
         <collDate cycle="P1" event="start" date="20070831">20070831</collDate>
         <collDate cycle="P1" event="end" date="20130630">20130630</collDate>
-        <dataKind>Kind of Data</dataKind>
         <nation>USA</nation>
         <geogCover>Cambridge</geogCover>
         <geogCover>MA</geogCover>
         <geogCover>Other Geographic Coverage</geogCover>
         <geoBndBox>
-          <southBL>41.6</southBL>
           <westBL>60.3</westBL>
           <eastBL>59.8</eastBL>
+          <southBL>41.6</southBL>
           <northBL>43.8</northBL>
         </geoBndBox>
+        <dataKind>Kind of Data</dataKind>
       </sumDscr>
     </stdyInfo>
     <method>
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-hdl.json b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-hdl.json
index 01cc4f2a13b..3f1597599b4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-hdl.json
+++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/dataset-hdl.json
@@ -13,6 +13,10 @@
     "lastUpdateTime": "2015-09-29T17:47:35Z",
     "releaseTime": "2015-09-29T17:47:35Z",
     "createTime": "2015-09-24T16:47:50Z",
+    "license": {
+      "name": "CC0 1.0",
+      "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+    },
     "metadataBlocks": {
       "citation": {
         "displayName": "Citation Metadata",
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml b/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml
index 0570c832e4f..507d752192d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml
+++ b/src/test/java/edu/harvard/iq/dataverse/export/ddi/exportfull.xml
@@ -21,7 +21,8 @@
       <titlStmt>
         <titl>Replication Data for: Title</titl>
         <subTitl>Subtitle</subTitl>
-        <altTitl>Alternative Title</altTitl>
+        <altTitl>Alternative Title1</altTitl>
+        <altTitl>Alternative Title2</altTitl>
         <IDNo agency="DOI">doi:10.5072/FK2/WKUKGV</IDNo>
         <IDNo agency="OtherIDAgency1">OtherIDIdentifier1</IDNo>
         <IDNo agency="OtherIDAgency2">OtherIDIdentifier2</IDNo>
@@ -33,8 +34,8 @@
         <othId role="Data Curator">LastContributor2, FirstContributor2</othId>
       </rspStmt>
       <prodStmt>
-        <producer affiliation="ProducerAffiliation1" abbr="ProducerAbbreviation1" role="http://ProducerLogoURL1.org" URI="http://ProducerURL1.org">LastProducer1, FirstProducer1</producer>
-        <producer affiliation="ProducerAffiliation2" abbr="ProducerAbbreviation2" role="http://ProducerLogoURL2.org" URI="http://ProducerURL2.org">LastProducer2, FirstProducer2</producer>
+        <producer affiliation="ProducerAffiliation1" abbr="ProducerAbbreviation1">LastProducer1, FirstProducer1</producer>
+        <producer affiliation="ProducerAffiliation2" abbr="ProducerAbbreviation2">LastProducer2, FirstProducer2</producer>
         <prodDate>1003-01-01</prodDate>
         <prodPlac>ProductionPlace</prodPlac>
         <software version="SoftwareVersion1">SoftwareName1</software>
@@ -44,19 +45,23 @@
       </prodStmt>
       <distStmt>
         <distrbtr source="archive">Root</distrbtr>
-        <distrbtr affiliation="DistributorAffiliation1" abbr="DistributorAbbreviation1" URI="http://DistributorURL1.org" role="http://DistributorLogoURL1.org">LastDistributor1, FirstDistributor1</distrbtr>
-        <distrbtr affiliation="DistributorAffiliation2" abbr="DistributorAbbreviation2" URI="http://DistributorURL2.org" role="http://DistributorLogoURL2.org">LastDistributor2, FirstDistributor2</distrbtr>
+        <distrbtr affiliation="DistributorAffiliation1" abbr="DistributorAbbreviation1" URI="http://DistributorURL1.org">LastDistributor1, FirstDistributor1</distrbtr>
+        <distrbtr affiliation="DistributorAffiliation2" abbr="DistributorAbbreviation2" URI="http://DistributorURL2.org">LastDistributor2, FirstDistributor2</distrbtr>
         <contact affiliation="ContactAffiliation1" email="ContactEmail1@mailinator.com">LastContact1, FirstContact1</contact>
         <contact affiliation="ContactAffiliation2" email="ContactEmail2@mailinator.com">LastContact2, FirstContact2</contact>
-        <distDate>1004-01-01</distDate>
         <depositr>LastDepositor, FirstDepositor</depositr>
         <depDate>1002-01-01</depDate>
+        <distDate>1004-01-01</distDate>
       </distStmt>
       <serStmt>
-        <serName>SeriesName</serName>
-        <serInfo>SeriesInformation</serInfo>
+        <serName>SeriesName1</serName>
+        <serInfo>SeriesInformation1</serInfo>
+      </serStmt>
+      <serStmt>
+        <serName>SeriesName2</serName>
+        <serInfo>SeriesInformation2</serInfo>
       </serStmt>
-      <holdings URI="https://doi.org/10.5072/FK2/WKUKGV"></holdings>
+      <holdings URI="https://doi.org/10.5072/FK2/WKUKGV"/>
     </citation>
     <stdyInfo>
       <subject>
@@ -78,34 +83,28 @@
         <collDate cycle="P1" event="end" date="1006-01-01">1006-01-01</collDate>
         <collDate cycle="P2" event="start" date="1006-02-01">1006-02-01</collDate>
         <collDate cycle="P2" event="end" date="1006-02-02">1006-02-02</collDate>
-        <dataKind>KindOfData1</dataKind>
-        <dataKind>KindOfData2</dataKind>
         <nation>Afghanistan</nation>
+        <nation>Albania</nation>
         <geogCover>GeographicCoverageCity1</geogCover>
         <geogCover>GeographicCoverageStateProvince1</geogCover>
         <geogCover>GeographicCoverageOther1</geogCover>
-        <nation>Albania</nation>
         <geogCover>GeographicCoverageCity2</geogCover>
         <geogCover>GeographicCoverageStateProvince2</geogCover>
         <geogCover>GeographicCoverageOther2</geogCover>
-        <geoBndBox>
-          <westBL>10</westBL>
-          <eastBL>20</eastBL>
-          <northBL>30</northBL>
-          <southBL>40</southBL>
-        </geoBndBox>
-        <geoBndBox>
-          <southBL>80</southBL>
-          <northBL>70</northBL>
-          <eastBL>60</eastBL>
-          <westBL>50</westBL>
-        </geoBndBox>
         <geogUnit>GeographicUnit1</geogUnit>
         <geogUnit>GeographicUnit2</geogUnit>
+        <geoBndBox>
+          <westBL>-72</westBL>
+          <eastBL>-70</eastBL>
+          <southBL>42</southBL>
+          <northBL>43</northBL>
+        </geoBndBox>
         <anlyUnit>UnitOfAnalysis1</anlyUnit>
         <anlyUnit>UnitOfAnalysis2</anlyUnit>
         <universe>Universe1</universe>
         <universe>Universe2</universe>
+        <dataKind>KindOfData1</dataKind>
+        <dataKind>KindOfData2</dataKind>
       </sumDscr>
       <notes>Notes1</notes>
     </stdyInfo>
@@ -117,10 +116,12 @@
         <frequenc>Frequency</frequenc>
         <sampProc>SamplingProcedure</sampProc>
         <targetSampleSize>
-          <sampleSizeFormula>TargetSampleSizeFormula</sampleSizeFormula>
           <sampleSize>100</sampleSize>
+          <sampleSizeFormula>TargetSampleSizeFormula</sampleSizeFormula>
         </targetSampleSize>
         <deviat>MajorDeviationsForSampleDesign</deviat>
+        <collMode>CollectionMode</collMode>
+        <resInstru>TypeOfResearchInstrument</resInstru>
         <sources>
           <dataSrc>DataSources1</dataSrc>
           <dataSrc>DataSources2</dataSrc>
@@ -128,23 +129,20 @@
           <srcChar>CharacteristicOfSourcesNoted</srcChar>
           <srcDocu>DocumentationAndAccessToSources</srcDocu>
         </sources>
-        <collMode>CollectionMode</collMode>
-        <resInstru>TypeOfResearchInstrument</resInstru>
         <collSitu>CharacteristicsOfDataCollectionSituation</collSitu>
         <actMin>ActionsToMinimizeLosses</actMin>
-        <conOps>ControlOperations</conOps>
+        <ConOps>ControlOperations</ConOps>
         <weight>Weighting</weight>
         <cleanOps>CleaningOperations</cleanOps>
       </dataColl>
+      <notes type="NotesType" subject="NotesSubject">NotesText</notes>
       <anlyInfo>
         <respRate>ResponseRate</respRate>
         <EstSmpErr>EstimatesOfSamplingError</EstSmpErr>
         <dataAppr>OtherFormsOfDataAppraisal</dataAppr>
       </anlyInfo>
-      <notes type="NotesType" subject="NotesSubject">NotesText</notes>
     </method>
     <dataAccs>
-      <notes type="DVN:TOA" level="dv">Terms of Access</notes>
       <setAvail>
         <accsPlac>Data Access Place</accsPlac>
         <origArch>Original Archive</origArch>
@@ -162,6 +160,7 @@
         <conditions>Conditions </conditions>
         <disclaimer>Disclaimer</disclaimer>
       </useStmt>
+      <notes type="DVN:TOA" level="dv">Terms of Access</notes>
     </dataAccs>
     <othrStdyMat>
       <relMat>RelatedMaterial1</relMat>
@@ -171,6 +170,7 @@
       <relPubl>
         <citation>
           <titlStmt>
+            <titl>RelatedPublicationCitation1</titl>
             <IDNo agency="ark">RelatedPublicationIDNumber1</IDNo>
           </titlStmt>
           <biblCit>RelatedPublicationCitation1</biblCit>
@@ -180,6 +180,7 @@
       <relPubl>
         <citation>
           <titlStmt>
+            <titl>RelatedPublicationCitation2</titl>
             <IDNo agency="arXiv">RelatedPublicationIDNumber2</IDNo>
           </titlStmt>
           <biblCit>RelatedPublicationCitation2</biblCit>
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java
index 69c8083734c..4032f4649a4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/export/dublincore/DublinCoreExportUtilTest.java
@@ -13,9 +13,9 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
 import org.junit.jupiter.api.Test;
 import org.xmlunit.assertj3.XmlAssert;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
index ab3a0263d66..6f0132e2bc9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandlerTest.java
@@ -1,26 +1,32 @@
 package edu.harvard.iq.dataverse.externaltools;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.FileMetadata;
+import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.util.URLTokenUtil;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.junit.jupiter.api.Test;
 
-import javax.json.Json;
-import javax.json.JsonObject;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.ArrayList;
 import java.util.List;
 
+@LocalJvmSettings
 public class ExternalToolHandlerTest {
 
     // TODO: It would probably be better to split these into individual tests.
@@ -48,7 +54,7 @@ public void testGetToolUrlWithOptionalQueryParameters() {
         Exception expectedException1 = null;
         String nullLocaleCode = null;
         try {
-            ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode);
+            URLTokenUtil externalToolHandler1 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode);
         } catch (Exception ex) {
             expectedException1 = ex;
         }
@@ -66,7 +72,7 @@ public void testGetToolUrlWithOptionalQueryParameters() {
         DataFile dataFile = new DataFile();
         dataFile.setId(42l);
         try {
-            ExternalToolHandler externalToolHandler1 = new ExternalToolHandler(externalTool, dataFile, nullApiToken, nullFileMetadata, nullLocaleCode);
+            URLTokenUtil externalToolHandler1 = new ExternalToolHandler(externalTool, dataFile, nullApiToken, nullFileMetadata, nullLocaleCode);
         } catch (Exception ex) {
             expectedException1 = ex;
         }
@@ -87,7 +93,7 @@ public void testGetToolUrlWithOptionalQueryParameters() {
                 .build().toString());
         Exception expectedException2 = null;
         try {
-            ExternalToolHandler externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode);
+            URLTokenUtil externalToolHandler2 = new ExternalToolHandler(externalTool, nullDataFile, nullApiToken, nullFileMetadata, nullLocaleCode);
         } catch (Exception ex) {
             expectedException2 = ex;
         }
@@ -220,10 +226,10 @@ public void testGetToolUrlWithAllowedApiCalls() {
         assertTrue(et != null);
         System.out.println("allowedApiCalls et created");
         System.out.println(et.getAllowedApiCalls());
-        ExternalToolHandler externalToolHandler = new ExternalToolHandler(et, ds, at, null);
+        URLTokenUtil externalToolHandler = new ExternalToolHandler(et, ds, at, null);
         System.out.println("allowedApiCalls eth created");
         JsonObject jo = externalToolHandler
-                .createPostBody(externalToolHandler.getParams(JsonUtil.getJsonObject(et.getToolParameters()))).build();
+                .createPostBody(externalToolHandler.getParams(JsonUtil.getJsonObject(et.getToolParameters())), JsonUtil.getJsonArray(et.getAllowedApiCalls())).build();
         assertEquals(1, jo.getJsonObject("queryParameters").getInt("datasetId"));
         String signedUrl = jo.getJsonArray("signedUrls").getJsonObject(0).getString("signedUrl");
         // The date and token will change each time but check for the constant parts of
@@ -234,4 +240,43 @@ public void testGetToolUrlWithAllowedApiCalls() {
         assertTrue(signedUrl.contains("&token="));
         System.out.println(JsonUtil.prettyPrint(jo));
     }
+
+    @Test
+    @JvmSetting(key = JvmSettings.SITE_URL, value = "https://librascholar.org")
+    public void testDatasetConfigureTool() {
+        List<ExternalToolType> externalToolTypes = new ArrayList<>();
+        var externalToolType = new ExternalToolType();
+        externalToolType.setType(ExternalTool.Type.CONFIGURE);
+        externalToolTypes.add(externalToolType);
+        var scope = ExternalTool.Scope.DATASET;
+        String toolUrl = "http://example.com";
+        var externalTool = new ExternalTool("displayName", "toolName", "description", externalToolTypes, scope, toolUrl, "{}", DataFileServiceBean.MIME_TYPE_TSV_ALT);
+
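+        // The {siteUrl}, {datasetPid}, and {localeCode} placeholders are replaced by the handler when it builds the query string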
+        externalTool.setToolParameters(Json.createObjectBuilder()
+                .add("queryParameters", Json.createArrayBuilder()
+                        .add(Json.createObjectBuilder()
+                                .add("siteUrl", "{siteUrl}")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("datasetPid", "{datasetPid}")
+                        )
+                        .add(Json.createObjectBuilder()
+                                .add("localeCode", "{localeCode}")
+                        )
+                )
+                .build().toString());
+
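+        // A global id is required so the {datasetPid} placeholder can be resolved to a DOI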
+        var dataset = new Dataset();
+        dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.5072", "ABC123", null, DOIServiceBean.DOI_RESOLVER_URL, null));
+        ApiToken nullApiToken = null;
+        String localeCode = "en";
+        var externalToolHandler = new ExternalToolHandler(externalTool, dataset, nullApiToken, localeCode);
+        System.out.println("tool: " + externalToolHandler.getToolUrlWithQueryParams());
+        assertEquals("http://example.com?siteUrl=https://librascholar.org&datasetPid=doi:10.5072/ABC123&localeCode=en", externalToolHandler.getToolUrlWithQueryParams());
+        assertFalse(externalToolHandler.getExternalTool().isExploreTool());
+        assertEquals("configure", externalToolHandler.getExternalTool().getExternalToolTypes().get(0).getType().toString());
+        assertEquals("dataset", externalToolHandler.getExternalTool().getScope().toString());
+
+    }
+
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
index 3885c9b358c..4f5af8b97b0 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolServiceBeanTest.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse.externaltools;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.DataFileServiceBean;
 import edu.harvard.iq.dataverse.DataTable;
@@ -8,14 +9,15 @@
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.GlobalId;
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
+import edu.harvard.iq.dataverse.util.URLTokenUtil;
+
 import java.util.ArrayList;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObjectBuilder;
+
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.Test;
 
 public class ExternalToolServiceBeanTest {
 
@@ -49,7 +51,7 @@ public void testfindAll() {
         externalToolTypes.add(externalToolType);
         ExternalTool.Scope scope = ExternalTool.Scope.FILE;
         ExternalTool externalTool = new ExternalTool("displayName", "toolName", "description", externalToolTypes, scope, "http://foo.com", "{}", DataFileServiceBean.MIME_TYPE_TSV_ALT);
-        ExternalToolHandler externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd, null);
+        URLTokenUtil externalToolHandler4 = new ExternalToolHandler(externalTool, dataFile, apiToken, fmd, null);
         List<ExternalTool> externalTools = new ArrayList<>();
         externalTools.add(externalTool);
         List<ExternalTool> availableExternalTools = externalToolService.findExternalToolsByFile(externalTools, dataFile);
@@ -142,7 +144,7 @@ public void testParseAddFileToolFilePid() {
         assertEquals("explorer", externalTool.getToolName());
         DataFile dataFile = new DataFile();
         dataFile.setId(42l);
-        dataFile.setGlobalId(new GlobalId("doi:10.5072/FK2/RMQT6J/G9F1A1"));
+        dataFile.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.5072","FK2/RMQT6J/G9F1A1", "/", DOIServiceBean.DOI_RESOLVER_URL, null));
         FileMetadata fmd = new FileMetadata();
         fmd.setId(2L);
         DatasetVersion dv = new DatasetVersion();
@@ -337,7 +339,7 @@ public void testParseAddExternalToolInputWrongType() {
         }
         assertNotNull(expectedException);
         System.out.println("exception: " + expectedException);
-        assertEquals("Type must be one of these values: [explore, configure, preview].", expectedException.getMessage());
+        assertEquals("Type must be one of these values: [explore, configure, preview, query].", expectedException.getMessage());
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
index bbe029e77e1..ea8613b70bf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/externaltools/ExternalToolTest.java
@@ -1,10 +1,11 @@
 package edu.harvard.iq.dataverse.externaltools;
 
-import javax.json.JsonObject;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import jakarta.json.JsonObject;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 import edu.harvard.iq.dataverse.DataFileServiceBean;
+
 import java.util.ArrayList;
 import java.util.List;
 
@@ -28,13 +29,13 @@ public void testToJson() {
         externalTool.setId(42l);
         JsonObject jsonObject = externalTool.toJson().build();
         System.out.println("result: " + jsonObject);
-        assertEquals("testToJson() with ExternalTool.DISPLAY_NAME", "myDisplayName", jsonObject.getString(ExternalTool.DISPLAY_NAME));
-        assertEquals("testToJson() with ExternalTool.TOOL_NAME", "explorer", jsonObject.getString(ExternalTool.TOOL_NAME));
-        assertEquals("testToJson() with ExternalTool.DESCRIPTION", "myDescription", jsonObject.getString(ExternalTool.DESCRIPTION));
-        assertEquals("testToJson() with ExternalTool.TYPES", "explore", jsonObject.getJsonArray(ExternalTool.TYPES).getString(0));
-        assertEquals("testToJson() with ExternalTool.TOOL_URL", "http://example.com", jsonObject.getString(ExternalTool.TOOL_URL));
-        assertEquals("testToJson() with ExternalTool.TOOL_PARAMETERS", "{}", jsonObject.getString(ExternalTool.TOOL_PARAMETERS));
-        assertEquals("testToJson() with ExternalTool.CONTENT_TYPE", DataFileServiceBean.MIME_TYPE_TSV_ALT, jsonObject.getString(ExternalTool.CONTENT_TYPE));
+        assertEquals("myDisplayName", jsonObject.getString(ExternalTool.DISPLAY_NAME), "testToJson() with ExternalTool.DISPLAY_NAME");
+        assertEquals("explorer", jsonObject.getString(ExternalTool.TOOL_NAME), "testToJson() with ExternalTool.TOOL_NAME");
+        assertEquals("myDescription", jsonObject.getString(ExternalTool.DESCRIPTION), "testToJson() with ExternalTool.DESCRIPTION");
+        assertEquals("explore", jsonObject.getJsonArray(ExternalTool.TYPES).getString(0), "testToJson() with ExternalTool.TYPES");
+        assertEquals("http://example.com", jsonObject.getString(ExternalTool.TOOL_URL), "testToJson() with ExternalTool.TOOL_URL");
+        assertEquals("{}", jsonObject.getString(ExternalTool.TOOL_PARAMETERS), "testToJson() with ExternalTool.TOOL_PARAMETERS");
+        assertEquals(DataFileServiceBean.MIME_TYPE_TSV_ALT, jsonObject.getString(ExternalTool.CONTENT_TYPE), "testToJson() with ExternalTool.CONTENT_TYPE");
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java
index 2c91eebbc83..7c31db5bee2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/feedback/FeedbackUtilTest.java
@@ -5,7 +5,6 @@
 import edu.harvard.iq.dataverse.DataFileCategory;
 import edu.harvard.iq.dataverse.DataFileTag;
 import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
@@ -27,21 +26,18 @@
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonReader;
-import javax.mail.internet.AddressException;
-import javax.mail.internet.InternetAddress;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import org.junit.BeforeClass;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonReader;
+import jakarta.mail.internet.AddressException;
+import jakarta.mail.internet.InternetAddress;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.junit.jupiter.api.BeforeAll;
 import org.mockito.Mockito;
 
 public class FeedbackUtilTest {
@@ -62,7 +58,7 @@ public class FeedbackUtilTest {
     private static final String systemEmail = "support@librascholar.edu";
     private static final boolean weKnowHowToCreateMockAuthenticatedUsers = false;
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() throws IOException, JsonParseException, AddressException {
 
         if (weKnowHowToCreateMockAuthenticatedUsers) {
@@ -181,9 +177,8 @@ public void testGatherFeedbackOnDataverse() {
         String messageSubject = "nice dataverse";
         String userMessage = "Let's talk!";
         System.out.println("first gather feedback");
-        List<Feedback> feedbacks1 = FeedbackUtil.gatherFeedback(dataverse, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        Feedback feedback = feedbacks1.get(0);
-        assertEquals(installationBrandName + " contact: " + messageSubject, feedback.getSubject());
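+        // gatherFeedback now returns a single Feedback; the trailing flag controls whether the support address is CC'd (see the ccEmail assertions below)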
+        Feedback feedback1 = FeedbackUtil.gatherFeedback(dataverse, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, true);
+        assertEquals(installationBrandName + " contact: " + messageSubject, feedback1.getSubject());
         String expectedBody
                 = "You have just been sent the following message from " + userEmail + " "
                 + "via the " + installationBrandName + " hosted dataverse named \"dvAlias1\":\n\n"
@@ -197,23 +192,26 @@ public void testGatherFeedbackOnDataverse() {
                 + "If you believe this was an error, please contact "
                 + supportTeamName + " at " + systemEmail + ". "
                 + "To respond directly to the individual who sent the message, simply reply to this email.";
-        System.out.println("body:\n\n" + feedback.getBody());
-        assertEquals(expectedBody, feedback.getBody());
-        assertEquals("dvContact1@librascholar.edu", feedback.getToEmail());
-        assertEquals("personClickingContactOrSupportButton@example.com", feedback.getFromEmail());
-        JsonObject jsonObject = feedback.toJsonObjectBuilder().build();
+        System.out.println("body:\n\n" + feedback1.getBody());
+        assertEquals(expectedBody, feedback1.getBody());
+        assertEquals("dvContact1@librascholar.edu,dvContact2@librascholar.edu", feedback1.getToEmail());
+        assertEquals(systemEmail, feedback1.getCcEmail());
+        
+        assertEquals("personClickingContactOrSupportButton@example.com", feedback1.getFromEmail());
+        JsonObject jsonObject = feedback1.toJsonObjectBuilder().build();
         System.out.println("json: " + jsonObject);
         assertEquals("personClickingContactOrSupportButton@example.com", jsonObject.getString("fromEmail"));
-        assertEquals("dvContact1@librascholar.edu", jsonObject.getString("toEmail"));
+        assertEquals("dvContact1@librascholar.edu,dvContact2@librascholar.edu", jsonObject.getString("toEmail"));
+        assertEquals(systemEmail, jsonObject.getString("ccEmail"));
         assertEquals(installationBrandName + " contact: " + "nice dataverse", jsonObject.getString("subject"));
         dataverse.setDataverseContacts(new ArrayList<>());
         System.out.println("second gather feedback");
-        List<Feedback> feedbacks2 = FeedbackUtil.gatherFeedback(dataverse, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        System.out.println("feedbacks2: " + feedbacks2);
-        feedback = feedbacks2.get(0);
-        assertEquals("support@librascholar.edu", feedback.getToEmail());
-        System.out.println("body:\n\n" + feedback.getBody());
-        assertTrue(feedback.getBody().startsWith("There is no contact address on file for this dataverse so this message is being sent to the system address."));
+        Feedback feedback2 = FeedbackUtil.gatherFeedback(dataverse, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
+        System.out.println("feedbacks2: " + feedback2);
+        assertEquals(systemEmail, feedback2.getToEmail());
+        assertEquals(null, feedback2.getCcEmail());
+        System.out.println("body:\n\n" + feedback2.getBody());
+        assertTrue(feedback2.getBody().startsWith("There is no contact address on file for this dataverse so this message is being sent to the system address."));
     }
 
     @Test
@@ -236,17 +234,18 @@ public void testGatherFeedbackOnDataset() {
         DataverseSession dataverseSession = null;
         String messageSubject = "nice dataset";
         String userMessage = "Let's talk!";
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(dataset, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        System.out.println("feedbacks: " + feedbacks);
-        assertEquals(2, feedbacks.size());
-        Feedback feedback = feedbacks.get(0);
+        Feedback feedback = FeedbackUtil.gatherFeedback(dataset, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, true);
+        System.out.println("feedbacks: " + feedback);
         System.out.println("Subject: " + feedback.getSubject());
         System.out.println("Body: " + feedback.getBody());
         System.out.println("From: " + feedback.getFromEmail());
         System.out.println("To: " + feedback.getToEmail());
-        assertEquals("ContactEmail1@mailinator.com", feedback.getToEmail());
+        System.out.println("CC: " + feedback.getCcEmail());
+
+        assertEquals("ContactEmail1@mailinator.com,ContactEmail2@mailinator.com", feedback.getToEmail());
+        assertEquals(systemEmail, feedback.getCcEmail());
         assertEquals(installationBrandName + " contact: " + messageSubject, feedback.getSubject());
-        String expected = "Hello Tom Brady,\n\n"
+        String expected = "Hello Tom Brady and Homer Simpson,\n\n"
                 // FIXME: change from "personClickingContactOrSupportButton@example.com" to "Homer Simpson" or whatever (add to contact form).
                 + "You have just been sent the following message from " + userEmail + " "
                 + "via the " + installationBrandName + " hosted dataset "
@@ -284,15 +283,15 @@ public void testGatherFeedbackOnDatasetNoContacts() {
         DataverseSession dataverseSession = null;
         String messageSubject = "nice dataset";
         String userMessage = "Let's talk!";
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(dataset, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        System.out.println("feedbacks: " + feedbacks);
-        assertEquals(1, feedbacks.size());
-        Feedback feedback = feedbacks.get(0);
+        Feedback feedback = FeedbackUtil.gatherFeedback(dataset, dataverseSession, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
         System.out.println("Subject: " + feedback.getSubject());
         System.out.println("Body: " + feedback.getBody());
         System.out.println("From: " + feedback.getFromEmail());
         System.out.println("To: " + feedback.getToEmail());
+        System.out.println("CC: " + feedback.getCcEmail());
+
         assertEquals(systemEmail, feedback.getToEmail());
+        assertEquals(null, feedback.getCcEmail());
         assertEquals(installationBrandName + " contact: " + messageSubject, feedback.getSubject());
         String expected = "There is no contact address on file for this dataset so this message is being sent to the system address.\n\n"
                 // FIXME: Add more context for person who receives systemEmail messages.
@@ -357,8 +356,7 @@ public void testGatherFeedbackOnFile() {
 
         String messageSubject = "nice file";
         String userMessage = "Let's talk!";
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(dataFile, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        Feedback feedback = feedbacks.get(0);
+        Feedback feedback = FeedbackUtil.gatherFeedback(dataFile, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
         System.out.println("feedback: " + feedback);
         System.out.println("Subject: " + feedback.getSubject());
         System.out.println("Body: " + feedback.getBody());
@@ -425,8 +423,7 @@ public void testGatherFeedbackOnFileNoContacts() {
 
         String messageSubject = "nice file";
         String userMessage = "Let's talk!";
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(dataFile, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        Feedback feedback = feedbacks.get(0);
+        Feedback feedback = FeedbackUtil.gatherFeedback(dataFile, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
         System.out.println("feedback: " + feedback);
         System.out.println("Subject: " + feedback.getSubject());
         System.out.println("Body: " + feedback.getBody());
@@ -443,9 +440,7 @@ public void testGatherFeedbackFromSupportButtonNullSession() {
         String messageSubject = "I'm clicking the support button.";
         String userMessage = "Help!";
         DvObject nullDvObject = null;
-        List<Feedback> feedbacks1 = FeedbackUtil.gatherFeedback(nullDvObject, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        System.out.println("feedbacks1: " + feedbacks1);
-        Feedback feedback = feedbacks1.get(0);
+        Feedback feedback = FeedbackUtil.gatherFeedback(nullDvObject, dataverseSessionNull, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
         assertEquals(installationBrandName + " support request: " + messageSubject, feedback.getSubject());
         String expectedBody
                 = "LibraScholar SWAT Team,\n\n"
@@ -460,15 +455,11 @@ public void testGatherFeedbackFromSupportButtonNullSession() {
         assertEquals("support@librascholar.edu", feedback.getToEmail());
         assertEquals("personClickingContactOrSupportButton@example.com", feedback.getFromEmail());
         InternetAddress nullSystemAddress = null;
-        List<Feedback> feedbacks2 = FeedbackUtil.gatherFeedback(nullDvObject, dataverseSessionNull, messageSubject, userMessage, nullSystemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        assertEquals(1, feedbacks2.size());
-        feedback = feedbacks2.get(0);
-        assertEquals(null, feedback.getToEmail());
+        Feedback feedback2 = FeedbackUtil.gatherFeedback(nullDvObject, dataverseSessionNull, messageSubject, userMessage, nullSystemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
+        assertEquals(null, feedback2.getToEmail());
         String nullUserMessage = null;
-        List<Feedback> feedbacks3 = FeedbackUtil.gatherFeedback(nullDvObject, dataverseSessionNull, messageSubject, nullUserMessage, nullSystemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        assertEquals(1, feedbacks3.size());
-        feedback = feedbacks3.get(0);
-        assertEquals(null, feedback.getToEmail());
+        Feedback feedback3 = FeedbackUtil.gatherFeedback(nullDvObject, dataverseSessionNull, messageSubject, nullUserMessage, nullSystemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
+        assertEquals(null, feedback3.getToEmail());
     }
 
     @Test
@@ -479,8 +470,7 @@ public void testGatherFeedbackFromSupportButtonLoggedIn() {
         String messageSubject = "I'm clicking the support button.";
         String userMessage = "Help!";
         DvObject dvObject = null;
-        List<Feedback> feedbacks = FeedbackUtil.gatherFeedback(dvObject, dataverseSessionAuthenticated, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName);
-        Feedback feedback = feedbacks.get(0);
+        Feedback feedback = FeedbackUtil.gatherFeedback(dvObject, dataverseSessionAuthenticated, messageSubject, userMessage, systemAddress, userEmail, baseUrl, installationBrandName, supportTeamName, false);
         assertEquals(messageSubject, feedback.getSubject());
         assertEquals("Help!", feedback.getBody());
         assertEquals("support@librascholar.edu", feedback.getToEmail());
diff --git a/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java
new file mode 100644
index 00000000000..56f8731b9c8
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/globus/GlobusUtilTest.java
@@ -0,0 +1,88 @@
+package edu.harvard.iq.dataverse.globus;
+
+import static org.junit.jupiter.api.Assertions.*;
+import static org.mockito.Mockito.mock;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+
+import edu.harvard.iq.dataverse.DOIServiceBean;
+import edu.harvard.iq.dataverse.DataFile;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.dataaccess.AbstractRemoteOverlayAccessIO;
+import edu.harvard.iq.dataverse.dataaccess.DataAccess;
+import edu.harvard.iq.dataverse.dataaccess.GlobusAccessibleStore;
+import edu.harvard.iq.dataverse.mocks.MocksFactory;
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
+import jakarta.json.JsonObject;
+
+public class GlobusUtilTest {
+
+    private Dataset dataset;
+    private DataFile mDatafile;
+    private DataFile rDatafile;
+    private String baseStoreId1 = "182ad2bda2f-c3508e719076";
+    private String baseStoreId2 = "182ad2bda2f-c3508e719077";
+    private String logoPath = "d7c42580-6538-4605-9ad8-116a61982644/hdc1/image002.mrc";
+    private String authority = "10.5072";
+    private String identifier = "F2ABCDEF";
+
+    @BeforeEach
+    public void setUp() {
+
+        // Managed Globus Store
+
+        // Nonsense endpoint/paths
+        System.setProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH,
+                "d7c42580-6538-4605-9ad8-116a61982644/hdc1");
+        System.setProperty("dataverse.files.globusm.managed", "true");
+
+        // Remote Store
+        System.setProperty("dataverse.files.globusr.managed", "false");
+        System.setProperty(
+                "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS,
+                "d7c42580-6538-4605-9ad8-116a61982644/hdc1");
+
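+        // Dataset with a DOI that owns one managed and one remote Globus file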
+        dataset = MocksFactory.makeDataset();
+        dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, authority, identifier, "/",
+                DOIServiceBean.DOI_RESOLVER_URL, null));
+        mDatafile = MocksFactory.makeDataFile();
+        mDatafile.setOwner(dataset);
+        mDatafile.setStorageIdentifier("globusm://" + baseStoreId1);
+
+        rDatafile = MocksFactory.makeDataFile();
+        rDatafile.setOwner(dataset);
+        rDatafile.setStorageIdentifier("globusr://" + baseStoreId2 + "//" + logoPath);
+        List<DataFile> files = new ArrayList<DataFile>();
+        files.add(mDatafile);
+        files.add(rDatafile);
+        dataset.setFiles(files);
+    }
+
+    @AfterEach
+    public void tearDown() {
+        System.clearProperty("dataverse.files.globusm." + GlobusAccessibleStore.TRANSFER_ENDPOINT_WITH_BASEPATH);
+        System.clearProperty("dataverse.files.globusm.managed");
+        System.clearProperty("dataverse.files.globusr.managed");
+        System.clearProperty(
+                "dataverse.files.globusr." + AbstractRemoteOverlayAccessIO.REFERENCE_ENDPOINTS_WITH_BASEPATHS);
+    }
+
+    
+    @Test
+    public void testGetFilesMap() {
+        
+        JsonObject jo = GlobusUtil.getFilesMap(dataset.getFiles(), dataset);
+        System.out.println(JsonUtil.prettyPrint(jo));
+        assertEquals("d7c42580-6538-4605-9ad8-116a61982644/hdc1/10.5072/F2ABCDEF/182ad2bda2f-c3508e719076", jo.getString(Long.toString(mDatafile.getId())));
+        assertEquals(logoPath, jo.getString(Long.toString(rDatafile.getId())));
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java
index cb0655c068f..96e314324ab 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestFrequencyTest.java
@@ -6,9 +6,9 @@
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataFileReader;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
-import javax.ejb.EJB;
+import jakarta.ejb.EJB;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -16,8 +16,8 @@
 import java.io.BufferedInputStream;
 import java.util.Collection;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class IngestFrequencyTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
index ca68af4090c..4dfedf5aa17 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
@@ -17,13 +17,13 @@
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.validation.ConstraintViolation;
+
+import jakarta.validation.ConstraintViolation;
 import org.dataverse.unf.UNFUtil;
 import org.dataverse.unf.UnfException;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class IngestUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java
index ea9e378739b..11257f188fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestableDataCheckerTest.java
@@ -10,15 +10,18 @@
 import java.io.IOException;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
-import org.apache.commons.io.FileUtils;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.List;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -26,26 +29,23 @@
  */
 public class IngestableDataCheckerTest {
    
-    @Rule
-    public TemporaryFolder tempFolder = new TemporaryFolder();
-   
     public IngestableDataCheckerTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
 
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {     
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
         
      
@@ -57,14 +57,14 @@ private File createTempFile(String filename, String fileContents) throws IOExcep
         if (filename == null){
             return null;
         }
-        File fh = this.tempFolder.newFile(filename);
-        fh.createNewFile();
+        
+        Path tmpFile = Files.createTempFile("ingestdatachecker", "");
         
         if (fileContents != null){
-            FileUtils.writeStringToFile(fh, fileContents);
+            Files.writeString(tmpFile, fileContents, StandardCharsets.UTF_8);
         }
         
-        return fh;
+        return tmpFile.toFile();
     }
     
     private MappedByteBuffer createTempFileAndGetBuffer(String filename, String fileContents) throws IOException {
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java
new file mode 100644
index 00000000000..343d7f39cf5
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/metadataextraction/impl/plugins/netcdf/NetcdfFileMetadataExtractorTest.java
@@ -0,0 +1,81 @@
+package edu.harvard.iq.dataverse.ingest.metadataextraction.impl.plugins.netcdf;
+
+import edu.harvard.iq.dataverse.DatasetFieldConstant;
+import edu.harvard.iq.dataverse.ingest.metadataextraction.FileMetadataIngest;
+import java.io.File;
+import java.util.Map;
+import java.util.Set;
+import static org.junit.jupiter.api.Assertions.*;
+import org.junit.jupiter.api.Test;
+
+public class NetcdfFileMetadataExtractorTest {
+
+    /**
+     * Expect some lat/long values (geospatial bounding box) with longitude
+     * values that have been transformed from a domain of 0 to 360 to a domain
+     * of -180 to 180.
+     */
+    @Test
+    public void testExtractLatLong() throws Exception {
+        String pathAndFile = "src/test/resources/netcdf/ICOADS_R3.0.0_1662-10.nc";
+        File file = new File(pathAndFile);
+        NetcdfFileMetadataExtractor instance = new NetcdfFileMetadataExtractor();
+        FileMetadataIngest netcdfMetadata = instance.ingestFile(file);
+        Map<String, Set<String>> map = netcdfMetadata.getMetadataMap();
+        assertEquals("-16.320007", map.get(DatasetFieldConstant.westLongitude).toArray()[0]);
+        assertEquals("-6.220001", map.get(DatasetFieldConstant.eastLongitude).toArray()[0]);
+        assertEquals("41.8", map.get(DatasetFieldConstant.southLatitude).toArray()[0]);
+        assertEquals("49.62", map.get(DatasetFieldConstant.northLatitude).toArray()[0]);
+    }
+
+    /**
+     * The NetCDF file under test doesn't have values for latitude/longitude
+     * (geospatial bounding box).
+     */
+    @Test
+    public void testExtractNoLatLong() throws Exception {
+        String pathAndFile = "src/test/resources/netcdf/madis-raob";
+        File file = new File(pathAndFile);
+        NetcdfFileMetadataExtractor instance = new NetcdfFileMetadataExtractor();
+        FileMetadataIngest netcdfMetadata = null;
+        netcdfMetadata = instance.ingestFile(file);
+        Map<String, Set<String>> map = netcdfMetadata.getMetadataMap();
+        assertNull(map.get(DatasetFieldConstant.westLongitude).toArray()[0]);
+        assertNull(map.get(DatasetFieldConstant.eastLongitude).toArray()[0]);
+        assertNull(map.get(DatasetFieldConstant.southLatitude).toArray()[0]);
+        assertNull(map.get(DatasetFieldConstant.northLatitude).toArray()[0]);
+    }
+
+    @Test
+    public void testStandardLongitude() {
+        NetcdfFileMetadataExtractor extractor = new NetcdfFileMetadataExtractor();
+
+        // Both are over 180. Subtract 360 from both.
+        // before: https://linestrings.com/bbox/#343.68,41.8,353.78,49.62
+        // after: https://linestrings.com/bbox/#-16.320007,41.8,-6.220001,49.62
+        assertEquals(new WestAndEastLongitude("-16.320007", "-6.220001"), extractor.getStandardLongitude(new WestAndEastLongitude("343.68", "353.78")));
+
+        // "If one of them is <0, the domain is -180:180." No change. https://linestrings.com/bbox/#-10,20,100,40
+        assertEquals(new WestAndEastLongitude("-10", "100"), extractor.getStandardLongitude(new WestAndEastLongitude("-10", "100")));
+
+        // Both are negative. No change. https://linestrings.com/bbox/#-124.7666666333333,25.066666666666666,-67.058333300000015,49.40000000000000
+        assertEquals(new WestAndEastLongitude("-124.7666666333333", "-67.058333300000015"), extractor.getStandardLongitude(new WestAndEastLongitude("-124.7666666333333", "-67.058333300000015")));
+
+        // Both between 0 and 180. Leave it alone. No change. https://linestrings.com/bbox/#25,20,35,40
+        assertEquals(new WestAndEastLongitude("25", "35"), extractor.getStandardLongitude(new WestAndEastLongitude("25", "35")));
+
+        // When only one value is over 180 we can't know whether to subtract 360 from both.
+        // Expect null. Don't insert potentially incorrect data into the database. https://linestrings.com/bbox/#100,20,181,40
+        assertNull(extractor.getStandardLongitude(new WestAndEastLongitude("100", "181")));
+
+        // West is negative but east is over 180 and out of range. Expect null. https://linestrings.com/bbox/#-10,20,181,40
+        assertNull(extractor.getStandardLongitude(new WestAndEastLongitude("-10", "181")));
+
+        // Both values are less than -180 and out of range. Expect null; no database insert. https://linestrings.com/bbox/#999,20,-888,40
+        assertNull(extractor.getStandardLongitude(new WestAndEastLongitude("-999", "-888")));
+
+        // Garbage in, garbage out. You can't pass "foo" and "bar" as longitudes.
+        assertNull(extractor.getStandardLongitude(new WestAndEastLongitude("foo", "bar")));
+    }
+
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java
index cdc4249ba94..fc066ef195e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/csv/CSVFileReaderTest.java
@@ -22,8 +22,9 @@
 import java.util.logging.Logger;
 import org.dataverse.unf.UNFUtil;
 import org.dataverse.unf.UnfException;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -67,7 +68,7 @@ public void testRead() {
             } catch (IOException ex) {
                 fail();
             }
-            assertEquals("Error on line " + line, expLine, foundLine);
+            assertEquals(expLine, foundLine, "Error on line " + line);
             line++;
         }
 
@@ -121,15 +122,15 @@ public void testVariables() {
         // OK, let's go through the individual variables:
         for (int i = 0; i < result.getVarQuantity(); i++) {
 
-            assertEquals("variable " + i + ":", expectedVariableNames[i], result.getDataVariables().get(i).getName());
+            assertEquals(expectedVariableNames[i], result.getDataVariables().get(i).getName(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableTypes[i], result.getDataVariables().get(i).getType());
+            assertEquals(expectedVariableTypes[i], result.getDataVariables().get(i).getType(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableIntervals[i], result.getDataVariables().get(i).getInterval());
+            assertEquals(expectedVariableIntervals[i], result.getDataVariables().get(i).getInterval(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableFormatCategories[i], result.getDataVariables().get(i).getFormatCategory());
+            assertEquals(expectedVariableFormatCategories[i], result.getDataVariables().get(i).getFormatCategory(), "variable " + i + ":");
 
-            assertEquals("variable " + i + ":", expectedVariableFormats[i], result.getDataVariables().get(i).getFormat());
+            assertEquals(expectedVariableFormats[i], result.getDataVariables().get(i).getFormat(), "variable " + i + ":");
         }
     }
 
@@ -196,7 +197,7 @@ public void testSubset() {
 
             Double[] columnVector = TabularSubsetGenerator.subsetDoubleVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue());
 
-            assertArrayEquals("column " + i + ":", floatVectors[vectorCount++], columnVector);
+            assertArrayEquals(floatVectors[vectorCount++], columnVector, "column " + i + ":");
         }
 
         // Discrete Numerics (aka, integers):
@@ -230,7 +231,7 @@ public void testSubset() {
 
             Long[] columnVector = TabularSubsetGenerator.subsetLongVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue());
 
-            assertArrayEquals("column " + i + ":", longVectors[vectorCount++], columnVector);
+            assertArrayEquals(longVectors[vectorCount++], columnVector, "column " + i + ":");
         }
 
         // And finally, Strings:
@@ -257,7 +258,7 @@ public void testSubset() {
 
             String[] columnVector = TabularSubsetGenerator.subsetStringVector(generatedTabInputStream, i, generatedDataTable.getCaseQuantity().intValue());
 
-            assertArrayEquals("column " + i + ":", stringVectors[vectorCount++], columnVector);
+            assertArrayEquals(stringVectors[vectorCount++], columnVector, "column " + i + ":");
         }
     }
 
@@ -387,7 +388,7 @@ public void testVariableUNFs() {
                 }
             }
 
-            assertEquals("Variable number " + i + ":", expectedUNFs[i], unf);
+            assertEquals(expectedUNFs[i], unf, "Variable number " + i + ":");
         }
 
     }
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java
index 2f8908c5920..113e9be6b54 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DTAFileReaderTest.java
@@ -5,8 +5,9 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class DTAFileReaderTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java
index 8ac84d9693a..a181f73c058 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/DataReaderTest.java
@@ -4,8 +4,8 @@
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * @author oscardssmith
@@ -39,12 +39,12 @@ public void testReadUShort() throws IOException {
     }
     
     // This should throw until we figure out what to do with uLongs that are large
-    @Test(expected = IOException.class)
-    public void testReadULong() throws IOException {
+    @Test
+    void testReadULong() throws IOException {
         byte[] bytes = {-1,-1,-1,-1,-1,-1,-1,-1,};
         BufferedInputStream stream = new BufferedInputStream(new ByteArrayInputStream(bytes));
         DataReader reader = new DataReader(stream);
         reader.setLSF(true);
-        assertEquals(-1, reader.readULong());
+        assertThrows(IOException.class, () -> reader.readULong());
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java
index 3c8c0a0d224..c963346b05e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/tabulardata/impl/plugins/dta/NewDTAFileReaderTest.java
@@ -4,19 +4,16 @@
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VariableCategory;
 import edu.harvard.iq.dataverse.ingest.tabulardata.TabularDataIngest;
-import edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta.DataReader;
+
 import java.io.BufferedInputStream;
-import java.io.ByteArrayInputStream;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 import org.apache.commons.io.FileUtils;
-import org.junit.Test;
-import static org.junit.Assert.*;
-import org.junit.Ignore;
-import org.junit.Assert;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class NewDTAFileReaderTest {
     NewDTAFileReader instance;
@@ -51,7 +48,7 @@ public void testStrl() throws IOException {
         
         String[] vars = {"make","price","mpg","rep78","trunk","gear_ratio","strls"};
         String[] actualVars = table.getDataVariables().stream().map((var) -> var.getName()).toArray(String[]::new);
-        Assert.assertArrayEquals(vars, actualVars);
+        assertArrayEquals(vars, actualVars);
         String expected = "\"Buick LeSabre\"	5788	1.1111111111111111E21	100	32767	2.73	\"a\"\n" +
                           "\"Buick Opel\"	4453	26.0		10	2.87	\"bb\"\n" +
                           "\"Buick Regal\"	5189	20.0	3	16	2.93	\"ccc\"\n";
@@ -69,7 +66,7 @@ public void testDates() throws IOException {
         assertEquals(4, (long)table.getCaseQuantity());
         String[] vars = {"Clock","Daily","Weekly","Monthly","Quarterly","BiAnnually","Annually"};
         String[] actualVars = table.getDataVariables().stream().map((var) -> var.getName()).toArray(String[]::new);
-        Assert.assertArrayEquals(vars, actualVars);
+        assertArrayEquals(vars, actualVars);
         String expected = "2595-09-27 06:58:52.032	2018-06-20	2018-11-05	2018-06-01	2018-01-01	2018-01-01	2018\n" +
                           "2595-09-27 06:58:52.032	2018-06-20	2018-11-05	2018-06-01	2018-04-01	2018-01-01	2018\n" +
                           "2595-09-27 06:58:52.032	2018-06-20	2018-11-05	2018-06-01	2018-07-01	2018-07-01	2018\n" +
@@ -77,14 +74,14 @@ public void testDates() throws IOException {
         assertEquals(expected, FileUtils.readFileToString(result.getTabDelimitedFile()));
     }
     
-    @Test(expected = IOException.class)
-    public void testNull() throws IOException {
+    @Test
+    void testNull() {
         instance = new NewDTAFileReader(null, 117);
-        TabularDataIngest result = instance.read(null, new File(""));
+        assertThrows(IOException.class, () -> instance.read(null, new File("")));
     }
 
     // TODO: Can we create a small file to check into the code base that exercises the value-label names non-zero offset issue?
-    @Ignore
+    @Disabled
     @Test
     public void testFirstCategoryNonZeroOffset() throws IOException {
         instance = new NewDTAFileReader(null, 117);
@@ -105,7 +102,7 @@ public void testFirstCategoryNonZeroOffset() throws IOException {
     }
 
     // TODO: Can we create a small file to check into the code base that exercises the value-label names non-zero offset issue?
-    @Ignore
+    @Disabled
     @Test
     public void testFirstCategoryNonZeroOffset1() throws IOException {
         instance = new NewDTAFileReader(null, 118);
@@ -125,7 +122,7 @@ public void testFirstCategoryNonZeroOffset1() throws IOException {
     }
     
     // TODO: Is there a way to exersise this code with a smaller file? 33k.dta is 21MB.
-    @Ignore
+    @Disabled
     @Test
     public void test33k() throws IOException {
         instance = new NewDTAFileReader(null, 119);
@@ -135,7 +132,7 @@ public void test33k() throws IOException {
     
     // TODO: Can we create a small file to check into the code base that exercises the characteristics issue?
     // FIXME: testCharacteristics is passing in DTA117FileReaderTest but not here.
-    @Ignore
+    @Disabled
     @Test
     public void testCharacteristics() throws IOException {
         instance = new NewDTAFileReader(null, 117);
diff --git a/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java
index eb9562a2a69..b2f70ba2675 100644
--- a/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/locality/StorageSiteUtilTest.java
@@ -3,9 +3,12 @@
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import java.util.ArrayList;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 public class StorageSiteUtilTest {
 
@@ -22,44 +25,47 @@ public void testParse() throws Exception {
         System.out.println("output: " + output);
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testMissingHostname() throws Exception {
+    @Test
+    void testMissingHostname() {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(StorageSite.NAME, "myName");
         job.add(StorageSite.PRIMARY_STORAGE, true);
         job.add(StorageSite.TRANSFER_PROTOCOLS, "rsync");
-        StorageSiteUtil.parse(job.build());
+        JsonObject sut = job.build();
+        assertThrows(IllegalArgumentException.class, () -> StorageSiteUtil.parse(sut));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testBadProtocol() throws Exception {
+    @Test
+    void testBadProtocol() {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(StorageSite.HOSTNAME, "myHostname");
         job.add(StorageSite.NAME, "myName");
         job.add(StorageSite.PRIMARY_STORAGE, true);
         job.add(StorageSite.TRANSFER_PROTOCOLS, "junk");
-        StorageSiteUtil.parse(job.build());
+        JsonObject sut = job.build();
+        assertThrows(IllegalArgumentException.class, () -> StorageSiteUtil.parse(sut));
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testNonBoolean() throws Exception {
+    @Test
+    void testNonBoolean() {
         JsonObjectBuilder job = Json.createObjectBuilder();
         job.add(StorageSite.HOSTNAME, "myHostname");
         job.add(StorageSite.NAME, "myName");
         job.add(StorageSite.PRIMARY_STORAGE, "not a boolean");
         job.add(StorageSite.TRANSFER_PROTOCOLS, "rsync");
-        StorageSiteUtil.parse(job.build());
+        JsonObject sut = job.build();
+        assertThrows(IllegalArgumentException.class, () -> StorageSiteUtil.parse(sut));
     }
 
-    @Test(expected = Exception.class)
-    public void testSecondPrimaryNotAllowed() throws Exception {
+    @Test
+    void testSecondPrimaryNotAllowed() {
         StorageSite newStorageSite = new StorageSite();
         newStorageSite.setPrimaryStorage(true);
         List<StorageSite> exitingSites = new ArrayList<>();
         StorageSite existingSite1 = new StorageSite();
         existingSite1.setPrimaryStorage(true);
         exitingSites.add(existingSite1);
-        StorageSiteUtil.ensureOnlyOnePrimary(newStorageSite, exitingSites);
+        assertThrows(Exception.class, () -> StorageSiteUtil.ensureOnlyOnePrimary(newStorageSite, exitingSites));
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java
index 6fa9ff1a8e9..61be14f41aa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBeanTest.java
@@ -3,11 +3,11 @@
 import edu.harvard.iq.dataverse.Dataset;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
-import org.mockito.Matchers;
 
-import javax.ejb.EJBException;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
+import jakarta.ejb.EJBException;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.Query;
+import org.mockito.ArgumentMatchers;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -36,7 +36,7 @@ void setup() {
     @Test
     void testGetDatasetMetricsByDatasetMonthCountry_withoutResults() {
         when(query.getResultList()).thenReturn(new ArrayList());
-        when(this.serviceBean.em.createQuery(Matchers.anyString())).thenReturn(query);
+        when(this.serviceBean.em.createQuery(ArgumentMatchers.anyString())).thenReturn(query);
 
         assertNull(serviceBean.getDatasetMetricsByDatasetMonthCountry(dataset, "01-01", "CH"));
     }
@@ -44,7 +44,7 @@ void testGetDatasetMetricsByDatasetMonthCountry_withoutResults() {
     @Test
     void testGetDatasetMetricsByDatasetMonthCountry_throwsForMultipleResults() {
         when(query.getResultList()).thenReturn(Arrays.asList(1, 2));
-        when(this.serviceBean.em.createQuery(Matchers.anyString())).thenReturn(query);
+        when(this.serviceBean.em.createQuery(ArgumentMatchers.anyString())).thenReturn(query);
 
         assertThrows(EJBException.class, () -> {
             serviceBean.getDatasetMetricsByDatasetMonthCountry(dataset, "01-01", "CH");
@@ -65,7 +65,7 @@ void testGetDatasetMetricsByDatasetMonthCountry_aggregatesForSingleResult() {
         datasetMetrics.setDownloadsUniqueMachine(8L);
 
         when(query.getResultList()).thenReturn(Arrays.asList(datasetMetrics));
-        when(this.serviceBean.em.createQuery(Matchers.anyString())).thenReturn(query);
+        when(this.serviceBean.em.createQuery(ArgumentMatchers.anyString())).thenReturn(query);
 
         DatasetMetrics result = serviceBean.getDatasetMetricsByDatasetMonthCountry(dataset, "04.2019", "CH");
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java
index eb15db883c7..c1051a57db8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountLoggingServiceBeanTest.java
@@ -16,11 +16,13 @@
 import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import java.util.Date;
-import javax.faces.context.FacesContext;
+
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
-import static org.junit.Assert.assertThat;
-import org.junit.Test;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import org.hamcrest.MatcherAssert;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -40,7 +42,7 @@ public void testMainAndFileConstructor() {
         dataset.setAuthority("Authority");
         dataset.setProtocol("Protocol");
         dataset.setIdentifier("Identifier"); 
-        GlobalId id = new GlobalId(dataset);
+        GlobalId id = dataset.getGlobalId();
         dataset.setGlobalId(id);
         dvVersion.setDataset(dataset);
         dvVersion.setAuthorsStr("OneAuthor;TwoAuthor");
diff --git a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java
index 4e034f0d314..56e786714b6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/makedatacount/MakeDataCountUtilTest.java
@@ -4,10 +4,11 @@
 import java.io.FileReader;
 import java.io.IOException;
 import java.util.List;
-import javax.json.Json;
-import javax.json.JsonObject;
-import org.junit.Assert;
-import org.junit.Test;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class MakeDataCountUtilTest {
 
@@ -30,7 +31,7 @@ public void testParseCitations() {
         try (FileReader reader = new FileReader("src/test/java/edu/harvard/iq/dataverse/makedatacount/citations-for-doi-10.7910-DVN-HQZOOB.json")) {
             report = Json.createReader(reader).readObject();
             List<DatasetExternalCitations> datasetExternalCitations = MakeDataCountUtil.parseCitations(report);
-            Assert.assertEquals(2, datasetExternalCitations.size());
+            assertEquals(2, datasetExternalCitations.size());
         } catch (FileNotFoundException ex) {
             System.out.print("File not found: " + ex.getMessage());
         } catch (IOException ex) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
index 9aa4c9c6723..484ce2ebe47 100644
--- a/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/metrics/MetricsUtilTest.java
@@ -8,21 +8,19 @@
 import java.util.List;
 import java.util.Arrays;
 import java.util.Collection;
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Enclosed.class)
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.fail;
+
 public class MetricsUtilTest {
 
     public static class MetricsUtilNoParamTest {
@@ -146,23 +144,23 @@ public void testDataversesBySubjectToJson() {
         }
 
         @Test
-        public void testSanitizeHappyPath() throws Exception {
+        void testSanitizeHappyPath() {
             assertEquals("2018-04", MetricsUtil.sanitizeYearMonthUserInput("2018-04"));
         }
 
-        @Test(expected = Exception.class)
-        public void testSanitizeJunk() throws Exception {
-            MetricsUtil.sanitizeYearMonthUserInput("junk");
+        @Test
+        void testSanitizeJunk() {
+            assertThrows(Exception.class, () -> MetricsUtil.sanitizeYearMonthUserInput("junk"));
         }
 
-        @Test(expected = Exception.class)
-        public void testSanitizeFullIso() throws Exception {
-            MetricsUtil.sanitizeYearMonthUserInput("2018-01-01");
+        @Test
+        void testSanitizeFullIso() {
+            assertThrows(Exception.class, () -> MetricsUtil.sanitizeYearMonthUserInput("2018-01-01"));
         }
 
-        @Test(expected = Exception.class)
-        public void testSanitizeYearMonthUserInputIsAfterCurrentDate() throws Exception {
-            MetricsUtil.sanitizeYearMonthUserInput("2099-01");
+        @Test
+        void testSanitizeYearMonthUserInputIsAfterCurrentDate() {
+            assertThrows(Exception.class, () -> MetricsUtil.sanitizeYearMonthUserInput("2099-01"));
         }
 
         @Test
@@ -207,42 +205,20 @@ public void testStringToJsonObjectBuilder() {
         }
 
     }
-
-    @RunWith(Parameterized.class)
-    public static class ValidateDataLocationStringTypeTest {
-        @Parameter
-        public String dataLocation;
-
-        @Parameter(1)
-        public boolean isExceptionExpected;
-
-        @Parameter(2)
-        public String expectedOutput;
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(new Object[][] { 
-                { "local", false, "local" }, 
-                { "remote", false, "remote" },
-                { "all", false, "all" }, 
-                { null, false, "local" }, 
-                { "", false, "local" },
-                { "abcd", true, null } 
-            });
-        }
-
-        @Test
-        public void testValidateDataLocationStringType() {
-            try {
-                assertEquals(expectedOutput, MetricsUtil.validateDataLocationStringType(dataLocation));
-            } catch (Exception e) {
-                if (isExceptionExpected) {
-                    return;
-                } else {
-                    fail("should not throw an exception!");
-                }
-            }
-        }
-
+    
+    @ParameterizedTest
+    @CsvSource(value = {
+        "local,false,local",
+        "remote,false,remote",
+        "all,false,all",
+        "NULL,false,local",
+        "'',false,local",
+        "abcd,true,NULL"
+    }, nullValues = "NULL")
+    void testValidateDataLocationStringType(String dataLocation, boolean isExceptionExpected, String expectedOutput) {
+        if (isExceptionExpected)
+            assertThrows(Exception.class, () -> MetricsUtil.validateDataLocationStringType(dataLocation));
+        else
+            assertEquals(expectedOutput, MetricsUtil.validateDataLocationStringType(dataLocation));
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java
index a8177537d5f..22936def497 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MockDatasetFieldSvc.java
@@ -3,7 +3,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 import edu.harvard.iq.dataverse.ControlledVocabularyValue;
 import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
diff --git a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
index cc4740e564c..927d288d660 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mocks/MocksFactory.java
@@ -143,6 +143,7 @@ public static Dataset makeDataset() {
         Dataset ds = new Dataset();
         ds.setId( nextId() );
         ds.setIdentifier("sample-ds-" + ds.getId() );
+        ds.setAuthority("10.5072");
         ds.setCategoriesByName( Arrays.asList("CatOne", "CatTwo", "CatThree") );
         final List<DataFile> files = makeFiles(10);
         final List<FileMetadata> metadatas = new ArrayList<>(10);
@@ -194,6 +195,9 @@ public static DatasetFieldType makeDatasetFieldType() {
         final Long id = nextId();
         DatasetFieldType retVal = new DatasetFieldType("SampleType-"+id, FieldType.TEXT, false);
         retVal.setId(id);
+        MetadataBlock mdb = new MetadataBlock();
+        mdb.setName("Test");
+        retVal.setMetadataBlock(mdb);
         return retVal;
     }
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java
index 69996ce71fe..8cf5b0a3f44 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mydata/MyDataUtilTest.java
@@ -1,88 +1,82 @@
 package edu.harvard.iq.dataverse.mydata;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.EmptySource;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.junit.jupiter.params.provider.NullSource;
 
-import org.junit.experimental.theories.DataPoints;
-import org.junit.experimental.theories.Theories;
-import org.junit.experimental.theories.Theory;
-import org.junit.runner.RunWith;
+import java.util.List;
 
-/**
- * Theories allows to add more formal tests to our code. In a way JUnit Theories behave 
- * much like mathematical theories that hold for every element of a large (infinite) set. 
- * JUnit will combine every possible combination (cartesian product) of datapoints and 
- * pass these to the tests annotated with @Theory. The assume statements make sure, only 
- * valid datapoints are tested in each Theory.
- * 
- * @Datapoints - defines an array of values to test on
- * @Datapoint - stores one single value
- * 
- * JUnit will no longer maintain a JUnit 4 Theories equivalent in the JUnit 5 codebase, as 
- * mentioned in a discussion here: https://github.com/junit-team/junit5/pull/1422#issuecomment-389644868
- */
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
-@RunWith(Theories.class)
-public class MyDataUtilTest {
+class MyDataUtilTest {
 
-    @DataPoints
-    public static String[] userIdentifier = { 
-        "@nzaugg", "nzaugg@", "nzaugg", "123nzaugg", "", " ", null, "@",  "n" };
+    static List<String> userIdentifier() {
+        return List.of("@nzaugg", "nzaugg@", "nzaugg", "123nzaugg", " ", "@", "n");
+    }
 
-    @Theory
-    public void testFormatUserIdentifierAsAssigneeIdentifierNull(String userIdentifier) {
-        assumeTrue(userIdentifier == null);
+    @ParameterizedTest
+    @NullSource
+    void testFormatUserIdentifierAsAssigneeIdentifierNull(String userIdentifier) {
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierAsAssigneeIdentifier(userIdentifier);
-        assertTrue(formattedUserIdentifier ==  null);
+        assertNull(formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierAsAssigneeIdentifierOneCharString(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierAsAssigneeIdentifierOneCharString(String userIdentifier) {
         assumeTrue(userIdentifier.startsWith("@"));
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierAsAssigneeIdentifier(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals(userIdentifier));
+        assertEquals(userIdentifier, formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierAsAssigneeIdentifier(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierAsAssigneeIdentifier(String userIdentifier) {
         assumeTrue(!userIdentifier.startsWith("@"));
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierAsAssigneeIdentifier(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals("@" + userIdentifier));
+        assertEquals("@" + userIdentifier, formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataFormNull(String userIdentifier) {
-        assumeTrue(userIdentifier == null);
+    
+    @ParameterizedTest
+    @NullSource
+    void testFormatUserIdentifierForMyDataFormNull(String userIdentifier) {
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier ==  null);
+        assertNull(formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataFormOneCharString(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierForMyDataFormOneCharString(String userIdentifier) {
         assumeTrue(userIdentifier.startsWith("@"));
         assumeTrue(userIdentifier.length() == 1);
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier ==  null);
+        assertNull(formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataFormLongerString(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    void testFormatUserIdentifierForMyDataFormLongerString(String userIdentifier) {
         assumeTrue(userIdentifier.startsWith("@"));
         assumeTrue(userIdentifier.length() > 1);
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals(userIdentifier.substring(1)));
+        assertEquals(userIdentifier.substring(1), formattedUserIdentifier);
     }
-
-    @Theory
-    public void testFormatUserIdentifierForMyDataForm(String userIdentifier) {
-        assumeTrue(userIdentifier != null);
+    
+    @ParameterizedTest
+    @MethodSource("userIdentifier")
+    @EmptySource
+    void testFormatUserIdentifierForMyDataForm(String userIdentifier) {
         assumeTrue(!userIdentifier.startsWith("@"));
+        
         String formattedUserIdentifier = MyDataUtil.formatUserIdentifierForMyDataForm(userIdentifier);
-        assertTrue(formattedUserIdentifier.equals(userIdentifier));
+        assertEquals(userIdentifier, formattedUserIdentifier);
     }
 
 }
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java b/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java
index c15bc280316..789204c1db4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/mydata/SolrQueryFormatterTest.java
@@ -5,8 +5,9 @@
  */
 package edu.harvard.iq.dataverse.mydata;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.lang.NullPointerException;
 import java.util.ArrayList;
@@ -16,20 +17,15 @@
 import java.util.List;
 import java.util.Random;
 import java.util.Set;
+import java.util.stream.Stream;
 
 import org.apache.commons.lang3.StringUtils;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-/**
- *
- * @author rmp553
- */
-@RunWith(Enclosed.class)
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+
 public class SolrQueryFormatterTest {
 
     public static class SolrQueryFormatterNoParamTest {
@@ -130,28 +126,7 @@ private void msgt(String s){
         }
     }
 
-    @RunWith(Parameterized.class)
-    public static class SolrQueryFormatterParamTest {
-
-        @Parameter(0)
-        public List<Long> sliceOfIds;
-
-        @Parameter(1)
-        public String paramName;
-
-        @Parameter(2)
-        public String dvObjectType;
-
-        // may be either
-        //  (i) the expected query part or
-        // (ii) the expected exception message
-        @Parameter(3)
-        public String expectedResult;
-
-        @Parameter(4)
-        public Class expectedException;
-
-        @Parameters
+    /*
         public static Collection data() {
             // The following list of test cases was compiled using the interface-based approach for input-space partition.
             // Therefor, for every input parameter, the domain of possible values was partitioned into different sets:
@@ -212,24 +187,90 @@ public static Collection data() {
                 { new ArrayList<Long>(Arrays.asList(1L, null)), "paramName", "dvObjectType", "(paramName:(1) AND dvObjectType:(dvObjectType))", null },
             });
         }
-
-        @Test
-        public void testFormatIdsForSolrClause() {
-            SolrQueryFormatter sqf = new SolrQueryFormatter();
-
-            if (expectedException == null) {
-                assertEquals(expectedResult, sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType));
-                return;
-            }
-
-            try {
-                sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType);
-                fail("Expected exception (" + expectedException.toString() + ") was not thrown");
-            } catch (Exception ex) {
-                assertEquals("verify the exception class", expectedException, ex.getClass());
-                assertEquals("verify the exception message", expectedResult, ex.getMessage());
-            }
+     */
+    
+    /*
+     * The following list of test cases was compiled using the interface-based approach for input-space partition.
+     * Therefore, for every input parameter, the domain of possible values was partitioned into different sets:
+     *    - sliceOfIds   (5 sets): null, empty, non-empty with null values only, non-empty with Long values only, non-empty with both null and Long values
+     *    - paramName    (3 sets): null, empty, non-empty
+     *    - dvObjectType (3 sets): null, empty, non-empty
+     * Then, for every set, a representative value was chosen and combined with every other set (3*3*5 = 45 test cases).
+     */
+    static Stream<Arguments> data() {
+        return Stream.of(
+            // sliceOfIds                   paramName    dvObjectType    expectedResult                                     expectedException
+            Arguments.of(null,              null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(null,              null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(null,              null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(null,              "",          null,           "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "",          "",             "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "",          "dvObjectType", "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "paramName", null,           "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "paramName", "",             "sliceOfIds cannot be null",                       NullPointerException.class),
+            Arguments.of(null,              "paramName", "dvObjectType", "sliceOfIds cannot be null",                       NullPointerException.class),
+            
+            Arguments.of(list(),            null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(),            null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(),            null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(),            "",          null,           "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "",          "",             "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "",          "dvObjectType", "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "paramName", null,           "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "paramName", "",             "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            Arguments.of(list(),            "paramName", "dvObjectType", "sliceOfIds must have at least 1 value",           IllegalStateException.class),
+            
+            Arguments.of(list((Long) null), null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list((Long) null), null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list((Long) null), null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list((Long) null), "",          null,           "(:())",                                           null),
+            Arguments.of(list((Long) null), "",          "",             "(:() AND dvObjectType:())",                       null),
+            Arguments.of(list((Long) null), "",          "dvObjectType", "(:() AND dvObjectType:(dvObjectType))",           null),
+            Arguments.of(list((Long) null), "paramName", null,           "(paramName:())",                                  null),
+            Arguments.of(list((Long) null), "paramName", "",             "(paramName:() AND dvObjectType:())",              null),
+            Arguments.of(list((Long) null), "paramName", "dvObjectType", "(paramName:() AND dvObjectType:(dvObjectType))",  null),
+            
+            Arguments.of(list(1L),          null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L),          null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L),          null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L),          "",          null,           "(:(1))",                                          null),
+            Arguments.of(list(1L),          "",          "",             "(:(1) AND dvObjectType:())",                      null),
+            Arguments.of(list(1L),          "",          "dvObjectType", "(:(1) AND dvObjectType:(dvObjectType))",          null),
+            Arguments.of(list(1L),          "paramName", null,           "(paramName:(1))",                                 null),
+            Arguments.of(list(1L),          "paramName", "",             "(paramName:(1) AND dvObjectType:())",             null),
+            Arguments.of(list(1L),          "paramName", "dvObjectType", "(paramName:(1) AND dvObjectType:(dvObjectType))", null),
+            
+            Arguments.of(list(1L, null),    null,        null,           "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L, null),    null,        "",             "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L, null),    null,        "dvObjectType", "paramName cannot be null",                        NullPointerException.class),
+            Arguments.of(list(1L, null),    "",          null,           "(:(1))",                                          null),
+            Arguments.of(list(1L, null),    "",          "",             "(:(1) AND dvObjectType:())",                      null),
+            Arguments.of(list(1L, null),    "",          "dvObjectType", "(:(1) AND dvObjectType:(dvObjectType))",          null),
+            Arguments.of(list(1L, null),    "paramName", null,           "(paramName:(1))",                                 null),
+            Arguments.of(list(1L, null),    "paramName", "",             "(paramName:(1) AND dvObjectType:())",             null),
+            Arguments.of(list(1L, null),    "paramName", "dvObjectType", "(paramName:(1) AND dvObjectType:(dvObjectType))", null)
+        );
+    }
+    
+    /**
+     * @param expectedResult May either be (i) the expected query part or (ii) the expected exception message
+     */
+    @ParameterizedTest
+    @MethodSource("data")
+    void testFormatIdsForSolrClause(List<Long> sliceOfIds, String paramName, String dvObjectType,
+                                    String expectedResult, Class<Throwable> expectedException) {
+        SolrQueryFormatter sqf = new SolrQueryFormatter();
+        
+        if (expectedException == null) {
+            assertEquals(expectedResult, sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType));
+            return;
         }
-
+        
+        Throwable e = assertThrows(expectedException, () -> sqf.formatIdsForSolrClause(sliceOfIds, paramName, dvObjectType));
+        assertEquals(expectedResult, e.getMessage());
+    }
+    
+    static List<Long> list(Long... args) {
+        return Arrays.asList(args);
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java
index 9d5b5e0e70e..d7831003142 100644
--- a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetDataTest.java
@@ -5,31 +5,32 @@
  */
 package edu.harvard.iq.dataverse.passwordreset;
 
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 public class PasswordResetDataTest {
 
     public PasswordResetDataTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java
index d7da02e4459..4fbd2352d09 100644
--- a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordResetServiceBeanTest.java
@@ -10,10 +10,9 @@
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentMatchers;
-import org.mockito.Matchers;
 
-import javax.persistence.EntityManager;
-import javax.persistence.TypedQuery;
+import jakarta.persistence.EntityManager;
+import jakarta.persistence.TypedQuery;
 import java.util.Arrays;
 import java.util.List;
 
@@ -97,7 +96,7 @@ void testAttemptPasswordReset_withNullNewPassword() {
 
     @Test
     void testAttemptPasswordReset_withValidationErrors() {
-        when(mockedPasswordValidatorServiceBean.validate(Matchers.anyString())).thenReturn(Arrays.asList("error"));
+        when(mockedPasswordValidatorServiceBean.validate(ArgumentMatchers.anyString())).thenReturn(Arrays.asList("error"));
 
         PasswordChangeAttemptResponse passwordChangeAttemptResponse = passwordResetServiceBean.attemptPasswordReset(new BuiltinUser(), "newpass", "token");
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java
index f6d02e35ddf..c15f7fa95e9 100644
--- a/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/passwordreset/PasswordValidatorTest.java
@@ -1,72 +1,50 @@
 package edu.harvard.iq.dataverse.passwordreset;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import java.util.Arrays;
-import java.util.Collection;
+import java.util.stream.Stream;
 
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
-@RunWith(Parameterized.class)
 public class PasswordValidatorTest {
-
-    public String password;
-    public boolean expected;
-    public boolean mustContainSpecialCharacters;
-    public boolean mustContainCapitalLetters;
-    public boolean mustContainNumbers;
-    public int minLength;
-    public int maxLength;
-
-    public PasswordValidatorTest(String password, boolean expected, boolean mustContainSpecialCharacters,
-            boolean mustContainCapitalLetters, boolean mustContainNumbers, int minLength, int maxLength) {
-        this.password = password;
-        this.expected = expected;
-        this.mustContainSpecialCharacters = mustContainSpecialCharacters;
-        this.mustContainCapitalLetters = mustContainCapitalLetters;
-        this.mustContainNumbers = mustContainNumbers;
-        this.minLength = minLength;
-        this.maxLength = maxLength;
-    }
-
-    @Parameters
-    public static Collection<Object[]> parameters() {
-        return Arrays.asList(
-             new Object[][] {
-                // Check if PasswordValidator correctly validates correct passwords
-                // with all combinations of Special Characters,
-                // Capital Letters and Numbers
-                {"abcdefghabcdefgh", true, false, false, false, 8, 30},    
-                {"@bcdefgh@bcdefgh", true, true, false, false, 8, 30},      
-                {"@bAdefgh@bAdefgh", true, true, true, false, 8, 30},      
-                {"abAdefghabAdefgh", true, false, true, false, 8, 30},     
-                {"a1Adefgha1Adefgh", true, false, true, true, 8, 30},      
-                {"ab1defghab1defgh", true, false, false, true, 8, 30},     
-                {"@1cdefgh@1cdefgh", true, true, false, true, 8, 30},      
-                {"@1Adefgh@1Adefgh", true, true, true, true, 8, 30},      
-                // Check if PasswordValidator correctly rejects wrong passwords
-                // with all combinations of Special Characters,
-                // Capital Letters and Numbers
-                {"abcabc", false, false, false, false, 8, 30},
-                {"abcdabcd", false, true, false, false, 8, 30},       
-                {"@bcd@bcd", false, true, true, false, 8, 30},       
-                {"@bc1@bc1", false, false, true, false, 8, 30},      
-                {"a1cda1cd", false, false, true, true, 8, 30},       
-                {"AbcdAbcd", false, false, false, true, 8, 30},      
-                {"@Bcd@Bcd", false, true, false, true, 8, 30},       
-                {"a1Ada1Ad", false, true, true, true, 8, 30},
-                {"", false, false, false, false, 1, 30},
-                {" ", false, false, false, false, 1, 30},
-                {"?!abcdef", false, true, false, false, 8, 30}
-             }
+    
+    static Stream<Arguments> testCases() {
+        return Stream.of(
+            // Check if PasswordValidator correctly validates correct passwords
+            // with all combinations of Special Characters,
+            // Capital Letters and Numbers
+            Arguments.of("abcdefghabcdefgh", true, false, false, false, 8, 30),
+            Arguments.of("@bcdefgh@bcdefgh", true, true, false, false, 8, 30),
+            Arguments.of("@bAdefgh@bAdefgh", true, true, true, false, 8, 30),
+            Arguments.of("abAdefghabAdefgh", true, false, true, false, 8, 30),
+            Arguments.of("a1Adefgha1Adefgh", true, false, true, true, 8, 30),
+            Arguments.of("ab1defghab1defgh", true, false, false, true, 8, 30),
+            Arguments.of("@1cdefgh@1cdefgh", true, true, false, true, 8, 30),
+            Arguments.of("@1Adefgh@1Adefgh", true, true, true, true, 8, 30),
+            // Check if PasswordValidator correctly rejects wrong passwords
+            // with all combinations of Special Characters,
+            // Capital Letters and Numbers
+            Arguments.of("abcabc", false, false, false, false, 8, 30),
+            Arguments.of("abcdabcd", false, true, false, false, 8, 30),
+            Arguments.of("@bcd@bcd", false, true, true, false, 8, 30),
+            Arguments.of("@bc1@bc1", false, false, true, false, 8, 30),
+            Arguments.of("a1cda1cd", false, false, true, true, 8, 30),
+            Arguments.of("AbcdAbcd", false, false, false, true, 8, 30),
+            Arguments.of("@Bcd@Bcd", false, true, false, true, 8, 30),
+            Arguments.of("a1Ada1Ad", false, true, true, true, 8, 30),
+            Arguments.of("", false, false, false, false, 1, 30),
+            Arguments.of(" ", false, false, false, false, 1, 30),
+            Arguments.of("?!abcdef", false, true, false, false, 8, 30)
         );
     }
     
-    @Test
-    public void testValidatePassword() {
+    @ParameterizedTest
+    @MethodSource("testCases")
+    void testValidatePassword(String password, boolean expected, boolean mustContainSpecialCharacters,
+                              boolean mustContainCapitalLetters, boolean mustContainNumbers, int minLength,
+                              int maxLength) {
         PasswordValidator validator = PasswordValidator.buildValidator(mustContainSpecialCharacters,
                 mustContainCapitalLetters, mustContainNumbers, minLength, maxLength);
         boolean isValidPassword = validator.validatePassword(password);
diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
index fc90d846bf6..dabc7f68fce 100644
--- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
@@ -1,25 +1,55 @@
 package edu.harvard.iq.dataverse.pidproviders;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.GlobalIdServiceBean;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 import java.io.IOException;
-import javax.json.JsonObjectBuilder;
-import javax.ws.rs.NotFoundException;
-import org.junit.Test;
-import org.junit.Ignore;
+import java.util.ArrayList;
+import java.util.List;
+
+import jakarta.json.JsonObjectBuilder;
+import jakarta.ws.rs.NotFoundException;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  * Useful for testing but requires DataCite credentials, etc.
  */
+@ExtendWith(MockitoExtension.class)
 public class PidUtilTest {
+    @Mock
+    private SettingsServiceBean settingsServiceBean;
+    @InjectMocks
+    private PermaLinkPidProviderServiceBean p = new PermaLinkPidProviderServiceBean();
+    
 
-    @Ignore
+    @BeforeEach
+    public void initMocks() {
+        MockitoAnnotations.initMocks(this);
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol)).thenReturn("perma");
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority)).thenReturn("DANSLINK");
+        p.reInit();
+    }
+    
+    @Disabled
     @Test
     public void testGetDoi() throws IOException {
         String username = System.getenv("DataCiteUsername");
         String password = System.getenv("DataCitePassword");
         String baseUrl = "https://api.test.datacite.org";
-        String pid = "";
-        pid = "doi:10.70122/QE5A-XN55";
+        GlobalId pid = new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.70122","QE5A-XN55", "/", DOIServiceBean.DOI_RESOLVER_URL, null);
         try {
             JsonObjectBuilder result = PidUtil.queryDoi(pid, baseUrl, username, password);
             String out = JsonUtil.prettyPrint(result.build());
@@ -28,5 +58,23 @@ public void testGetDoi() throws IOException {
             System.out.println("ex: " + ex);
         }
     }
+    
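+    /**
+     * Round-trip check for the perma-link provider: a PermaLink GlobalId rendered as a
+     * string and as a URL should parse back into an equivalent GlobalId via PidUtil.
+     */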
+    @Test
+    public void testGetPermaLink() throws IOException {
+        List<GlobalIdServiceBean> list = new ArrayList<GlobalIdServiceBean>();
+        
+
+        list.add(p);
+        PidUtil.addAllToProviderList(list);
+        GlobalId pid = new GlobalId(PermaLinkPidProviderServiceBean.PERMA_PROTOCOL,"DANSLINK","QE5A-XN55", "", p.getUrlPrefix(), PermaLinkPidProviderServiceBean.PERMA_PROVIDER_NAME);
+        System.out.println(pid.asString());
+        System.out.println(pid.asURL());
+        
+        GlobalId pid2 = PidUtil.parseAsGlobalID(pid.asString());
+        assertEquals(pid.asString(), pid2.asString());
+        GlobalId pid3 = PidUtil.parseAsGlobalID(pid.asURL());
+        assertEquals(pid.asString(), pid3.asString());
+        
+    }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
index 3984f972308..da94b288bee 100644
--- a/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/privateurl/PrivateUrlUtilTest.java
@@ -14,7 +14,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import org.junit.Assert;
+
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -102,7 +102,7 @@ public void testGetDatasetFromRoleAssignmentSuccess() {
         RoleAssignment ra = this.createTestRoleAssignment(dataset);
 
         assertNotNull(PrivateUrlUtil.getDatasetFromRoleAssignment(ra));
-        assertEquals("#42", ra.getAssigneeIdentifier());
+        assertEquals(PrivateUrlUser.PREFIX + "42", ra.getAssigneeIdentifier());
     }
 
     @Test
@@ -137,7 +137,7 @@ public void testGetDraftDatasetVersionFromRoleAssignmentSuccess() {
 
         DatasetVersion datasetVersionOut = PrivateUrlUtil.getDraftDatasetVersionFromRoleAssignment(ra);
         assertNotNull(datasetVersionOut);
-        assertEquals("#42", ra.getAssigneeIdentifier());
+        assertEquals(PrivateUrlUser.PREFIX + "42", ra.getAssigneeIdentifier());
     }
 
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java b/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java
index efa83fbb950..f59f686a94c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/provenance/ProvInvestigatorTest.java
@@ -6,14 +6,15 @@
 package edu.harvard.iq.dataverse.provenance;
 
 import com.google.gson.JsonParser;
-import edu.harvard.iq.dataverse.NonEssentialTests;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.logging.Logger;
-import static org.junit.Assert.*;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import static org.junit.jupiter.api.Assertions.*;
+
+import edu.harvard.iq.dataverse.util.testing.Tags;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -27,13 +28,13 @@ public class ProvInvestigatorTest {
     JsonParser jsonParser;
     private static final Logger logger = Logger.getLogger(ProvInvestigatorTest.class.getCanonicalName());
     
-    @Before
+    @BeforeEach
     public void setUp() {
         provUtilBean = ProvInvestigator.getInstance();
         jsonParser = new JsonParser();
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvValidator() {   
         String validJsonString = "{\n" +
@@ -105,7 +106,7 @@ public void testProvValidator() {
 
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvNamesNotInsideEntity() throws IOException {
         //name and type on their own
@@ -120,7 +121,7 @@ public void testProvNamesNotInsideEntity() throws IOException {
         assertFalse(entities.size() > 0); 
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvNameJsonParserEmptyEntities() throws IOException {
         String jsonString = "{\n" +
@@ -159,8 +160,8 @@ public void testProvNameJsonParserEmptyEntities() throws IOException {
     
     //Note: this test has entity tags in multiple places, all with unique names
     //Only one entity is added to our list per unique name.
-
-    @Category(NonEssentialTests.class)
+    
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvJsonWithEntitiesInMultiplePlaces() throws IOException {
         String jsonString = "{\n" +
@@ -233,7 +234,7 @@ public void testProvJsonWithEntitiesInMultiplePlaces() throws IOException {
         assertTrue(entities.size() == 7);
     }
     
-    @Category(NonEssentialTests.class)
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvJsonWithEntitiesInMultiplePlacesWithSameNames() throws IOException {
         String jsonString = "{\n" +
@@ -271,8 +272,8 @@ public void testProvJsonWithEntitiesInMultiplePlacesWithSameNames() throws IOExc
         assertTrue(entities.get("ex:report2").fileType.equals("not report"));
         assertTrue(entities.size() == 3); //ex:report2 & ex:report1 are repeated
     }
-   
-    @Category(NonEssentialTests.class)
+    
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testProvLongJsonWithEntities() throws IOException {
         String jsonString = "{\n" +
diff --git a/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java
index d09c6eee8dc..99fd3b2766f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/repositorystorageabstractionlayer/RepositoryStorageAbstractionLayerUtilTest.java
@@ -4,10 +4,10 @@
 import edu.harvard.iq.dataverse.locality.StorageSite;
 import java.util.ArrayList;
 import java.util.List;
-import javax.json.JsonArray;
-import javax.json.JsonObject;
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class RepositoryStorageAbstractionLayerUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
index aab6af660cb..adf48e05f09 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
@@ -1,24 +1,17 @@
 package edu.harvard.iq.dataverse.search;
 
-import edu.harvard.iq.dataverse.ControlledVocabularyValue;
-import edu.harvard.iq.dataverse.Dataset;
-import edu.harvard.iq.dataverse.DatasetField;
-import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
-import edu.harvard.iq.dataverse.DatasetFieldType;
-import edu.harvard.iq.dataverse.DatasetVersion;
-import edu.harvard.iq.dataverse.Dataverse;
+import edu.harvard.iq.dataverse.*;
 import edu.harvard.iq.dataverse.Dataverse.DataverseType;
-import edu.harvard.iq.dataverse.DataverseServiceBean;
-import edu.harvard.iq.dataverse.GlobalId;
-import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.common.SolrInputDocument;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -28,14 +21,14 @@
 import org.mockito.junit.jupiter.MockitoExtension;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Set;
+import java.util.*;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
+@LocalJvmSettings
 @ExtendWith(MockitoExtension.class)
 public class IndexServiceBeanTest {
     private static final Logger logger = Logger.getLogger(IndexServiceBeanTest.class.getCanonicalName());
@@ -105,10 +98,43 @@ public void TestIndexing() throws SolrServerException, IOException {
         assertTrue(indexedFields.contains("language"));
     }
 
+    // A bounding box whose west longitude is greater than its east longitude is invalid;
+    // indexing should drop it, so neither "geolocation" nor "boundingBox" ends up in the Solr doc.
+    @Test
+    public void testValidateBoundingBox() throws SolrServerException, IOException {
+        final IndexableDataset indexableDataset = createIndexableDataset();
+        final DatasetVersion datasetVersion = indexableDataset.getDatasetVersion();
+        DatasetField dsf = new DatasetField();
+        DatasetFieldType dsft = new DatasetFieldType(DatasetFieldConstant.geographicBoundingBox, DatasetFieldType.FieldType.TEXT, true);
+        dsf.setDatasetFieldType(dsft);
+
+        List<DatasetFieldCompoundValue> vals = new LinkedList<>();
+        DatasetFieldCompoundValue val = new DatasetFieldCompoundValue();
+        val.setParentDatasetField(dsf);
+        val.setChildDatasetFields(Arrays.asList(
+                constructBoundingBoxValue(DatasetFieldConstant.westLongitude, "34.9"), // bad value. must be less than east
+                constructBoundingBoxValue(DatasetFieldConstant.eastLongitude, "34.8"),
+                constructBoundingBoxValue(DatasetFieldConstant.northLatitude, "34.2"),
+                constructBoundingBoxValue(DatasetFieldConstant.southLatitude, "34.1")
+        ));
+        vals.add(val);
+        dsf.setDatasetFieldCompoundValues(vals);
+        datasetVersion.getDatasetFields().add(dsf);
+
+        final SolrInputDocuments docs = indexService.toSolrDocs(indexableDataset, null);
+        Optional<SolrInputDocument> doc = docs.getDocuments().stream().findFirst();
+        assertTrue(doc.isPresent());
+        assertTrue(!doc.get().containsKey("geolocation"));
+        assertTrue(!doc.get().containsKey("boundingBox"));
+    }
+    private DatasetField constructBoundingBoxValue(String datasetFieldTypeName, String value) {
+        DatasetField retVal = new DatasetField();
+        retVal.setDatasetFieldType(new DatasetFieldType(datasetFieldTypeName, DatasetFieldType.FieldType.TEXT, false));
+        retVal.setDatasetFieldValues(Collections.singletonList(new DatasetFieldValue(retVal, value)));
+        return retVal;
+    }
+
     private IndexableDataset createIndexableDataset() {
         final Dataset dataset = MocksFactory.makeDataset();
-        String fakeId = "doi:10.666/FAKE/fake";
-        dataset.setGlobalId(new GlobalId(fakeId));
+        dataset.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.666", "FAKE/fake", "/", DOIServiceBean.DOI_RESOLVER_URL, null));
         final DatasetVersion datasetVersion = dataset.getCreateVersion(null);
         DatasetField field = createCVVField("language", "English", false);
         datasetVersion.getDatasetFields().add(field);
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java
index 6599f829673..9be53b8b8b1 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexUtilTest.java
@@ -1,32 +1,32 @@
 package edu.harvard.iq.dataverse.search;
 
 import java.util.Arrays;
-import java.util.List;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
 public class IndexUtilTest {
 
     public IndexUtilTest() {
     }
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
 
-    @Before
+    @BeforeEach
     public void setUp() {
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java
index 0ea5597b905..c9f9eb67130 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SearchFilesServiceBeanTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.search;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
 public class SearchFilesServiceBeanTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java
index f94da336ca3..234d72c0d19 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SearchIncludeFragmentTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.MetadataBlock;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.util.Arrays;
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java
index a3b3c8a2080..72eafcd763c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java
@@ -4,6 +4,7 @@
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -14,6 +15,7 @@
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
+@LocalJvmSettings
 @ExtendWith(MockitoExtension.class)
 class SolrClientServiceTest {
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
index 8def87cee5a..4fb29869db7 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrSearchResultTest.java
@@ -1,18 +1,18 @@
 package edu.harvard.iq.dataverse.search;
 
-import javax.json.Json;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.ArrayList;
 import java.util.List;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 // ****************************************************************************************
 // The following tests test the setPublicationStatuses method aiming for 100% prime 
@@ -35,7 +35,7 @@ public class SolrSearchResultTest {
 
     SolrSearchResult solrSearchResult;
 
-    @Before
+    @BeforeEach
     public void before() {
         this.unpublishedFlag = IndexServiceBean.getUNPUBLISHED_STRING();
         this.publishedFlag = IndexServiceBean.getPUBLISHED_STRING();
@@ -47,7 +47,7 @@ public void before() {
         this.solrSearchResult = new SolrSearchResult("myQuery", "myName");
     }
 
-    @After
+    @AfterEach
     public void after() {
         this.unpublishedFlag = null;
         this.publishedFlag = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java
index 956063d4e1f..c8425198b83 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SortByTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.search;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class SortByTest {
 
@@ -14,14 +14,14 @@ public class SortByTest {
     private String order;
     private SortBy instance;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         this.field = "field";
         this.order = SortBy.ASCENDING;
         this.instance = new SortBy(field, order);
     }
 
-    @After
+    @AfterEach
     public void tearDown() {
         this.field = null;
         this.order = null;
diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java
new file mode 100644
index 00000000000..dad86e73d19
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java
@@ -0,0 +1,110 @@
+package edu.harvard.iq.dataverse.settings;
+
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Assumptions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Set;
+
+import static java.nio.file.attribute.PosixFilePermission.GROUP_READ;
+import static java.nio.file.attribute.PosixFilePermission.OWNER_READ;
+import org.junit.jupiter.api.Disabled;
+
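+/**
+ * Checks ConfigCheckService#checkSystemDirectories() with the "test.filesDir" system property
+ * pointing at a relative path, a read-only directory, a missing directory whose parent is
+ * read-only, and a missing directory that can be created.
+ */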
+@Disabled
+class ConfigCheckServiceTest {
+    
+    @TempDir
+    static Path testDir;
+    
+    private static final String testDirProp = "test.filesDir";
+    
+    @AfterAll
+    static void tearDown() {
+        System.clearProperty(testDirProp);
+    }
+    
+    @Nested
+    class TestDirNotAbsolute {
+        @Test
+        void nonAbsolutePathForTestDir() {
+            System.setProperty(testDirProp, "foobar");
+            ConfigCheckService sut = new ConfigCheckService();
+            Assertions.assertFalse(sut.checkSystemDirectories());
+        }
+    }
+
+    @Nested
+    class TestDirNotWritable {
+        
+        Path notWriteableSubfolder = testDir.resolve("readonly");
+        
+        @BeforeEach
+        void setUp() throws IOException {
+            Files.createDirectory(notWriteableSubfolder);
+            Files.setPosixFilePermissions(notWriteableSubfolder, Set.of(OWNER_READ, GROUP_READ));
+            System.setProperty(testDirProp, notWriteableSubfolder.toString());
+        }
+        
+        @Test
+        void writeCheckFails() {
+            Assumptions.assumeTrue(Files.exists(notWriteableSubfolder));
+            
+            ConfigCheckService sut = new ConfigCheckService();
+            Assertions.assertFalse(sut.checkSystemDirectories());
+        }
+    }
+    
+    @Nested
+    class TestDirNotExistent {
+        
+        Path notExistTestfolder = testDir.resolve("parent-readonly");
+        Path notExistConfigSubfolder = notExistTestfolder.resolve("foobar");
+        
+        @BeforeEach
+        void setUp() throws IOException {
+            Files.createDirectory(notExistTestfolder);
+            // Make test dir not writeable, so the subfolder cannot be created
+            Files.setPosixFilePermissions(notExistTestfolder, Set.of(OWNER_READ, GROUP_READ));
+            System.setProperty(testDirProp, notExistConfigSubfolder.toString());
+        }
+        
+        @Test
+        void mkdirFails() {
+            Assumptions.assumeTrue(Files.exists(notExistTestfolder));
+            Assumptions.assumeFalse(Files.exists(notExistConfigSubfolder));
+            
+            ConfigCheckService sut = new ConfigCheckService();
+            Assertions.assertFalse(sut.checkSystemDirectories());
+        }
+    }
+    
+    @Nested
+    class TestDirCreated {
+
+        Path missingToBeCreatedTestfolder = testDir.resolve("create-me");
+        Path missingToBeCreatedSubfolder = missingToBeCreatedTestfolder.resolve("foobar");
+        
+        @BeforeEach
+        void setUp() throws IOException {
+            Files.createDirectory(missingToBeCreatedTestfolder);
+            System.setProperty(testDirProp, missingToBeCreatedSubfolder.toString());
+        }
+        
+        @Test
+        void mkdirSucceeds() {
+            Assumptions.assumeTrue(Files.exists(missingToBeCreatedTestfolder));
+            Assumptions.assumeFalse(Files.exists(missingToBeCreatedSubfolder));
+            
+            ConfigCheckService sut = new ConfigCheckService();
+            Assertions.assertTrue(sut.checkSystemDirectories());
+        }
+    }
+    
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java
new file mode 100644
index 00000000000..26f2186695d
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/settings/FeatureFlagsTest.java
@@ -0,0 +1,23 @@
+package edu.harvard.iq.dataverse.settings;
+
+import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+@LocalJvmSettings
+class FeatureFlagsTest {
+
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "false", varArgs = "api-session-auth")
+    void testFlagDisabled() {
+        assertFalse(FeatureFlags.API_SESSION_AUTH.enabled());
+    }
+
+    @Test
+    @JvmSetting(key = JvmSettings.FEATURE_FLAG, value = "true", varArgs = "api-session-auth")
+    void testFlagEnabled() {
+        assertTrue(FeatureFlags.API_SESSION_AUTH.enabled());
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java
index 68458f6623c..6b03f20fc41 100644
--- a/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/settings/JvmSettingsTest.java
@@ -1,10 +1,13 @@
 package edu.harvard.iq.dataverse.settings;
 
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+import edu.harvard.iq.dataverse.util.testing.SystemProperty;
 import org.junit.jupiter.api.Test;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
+@LocalJvmSettings
 class JvmSettingsTest {
     @Test
     @JvmSetting(key = JvmSettings.VERSION, value = "foobar")
@@ -13,8 +16,23 @@ void lookupSetting() {
         assertEquals("foobar", JvmSettings.VERSION.lookupOptional().orElse(""));
     }
     
-    /*
-     * TODO: add more tests here for features like old names, patterned settings etc when adding
-     *       these in other pull requests adding new settings making use of these features.
-     */
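+    // The tests below look up settings through their alias names (the old "doi.*" system
+    // properties), including aliases whose setting carries a default value.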
+    @Test
+    @SystemProperty(key = "doi.username", value = "test")
+    void lookupSettingViaAlias() {
+        assertEquals("test", JvmSettings.DATACITE_USERNAME.lookup());
+    }
+    
+    @Test
+    @SystemProperty(key = "doi.baseurlstring", value = "test")
+    void lookupSettingViaAliasWithDefaultInMPCFile() {
+        assertEquals("test", JvmSettings.DATACITE_MDS_API_URL.lookup());
+    }
+    
+    @Test
+    @SystemProperty(key = "doi.dataciterestapiurlstring", value = "foo")
+    @SystemProperty(key = "doi.mdcbaseurlstring", value = "bar")
+    void lookupSettingViaAliasWithDefaultInMPCFileAndTwoAliases() {
+        assertEquals("foo", JvmSettings.DATACITE_REST_API_URL.lookup());
+    }
+
 }
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
index cc691f0a3b5..41032ffa811 100644
--- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse.sitemap;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.Dataverse;
@@ -9,7 +10,6 @@
 import edu.harvard.iq.dataverse.util.xml.XmlValidator;
 import java.io.File;
 import java.io.IOException;
-import java.net.MalformedURLException;
 import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -20,17 +20,39 @@
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import org.junit.Test;
-import org.xml.sax.SAXException;
 
-public class SiteMapUtilTest {
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-    @Test
-    public void testUpdateSiteMap() throws IOException, ParseException {
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+import org.xml.sax.SAXException;
 
+class SiteMapUtilTest {
+
+    @TempDir
+    Path tempDir;
+    Path tempDocroot;
+    
+    @BeforeEach
+    void setup() throws IOException {
+        // NOTE: This might be unsafe for parallel tests, but our @SystemProperty helper does not yet support
+        //       lookups from vars or methods.
+        System.setProperty("test.filesDir", tempDir.toString());
+        this.tempDocroot = tempDir.resolve("docroot");
+        Files.createDirectory(tempDocroot);
+    }
+    
+    @AfterEach
+    void teardown() {
+        System.clearProperty("test.filesDir");
+    }
+    
+    @Test
+    void testUpdateSiteMap() throws IOException, ParseException, SAXException {
+        // given
         List<Dataverse> dataverses = new ArrayList<>();
         String publishedDvString = "publishedDv1";
         Dataverse publishedDataverse = new Dataverse();
@@ -44,29 +66,31 @@ public void testUpdateSiteMap() throws IOException, ParseException {
         List<Dataset> datasets = new ArrayList<>();
 
         Dataset published = new Dataset();
-        String publishedPid = "doi:10.666/FAKE/published1";
-        published.setGlobalId(new GlobalId(publishedPid));
+        published.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/published1", null, DOIServiceBean.DOI_RESOLVER_URL, null));
+        String publishedPid = published.getGlobalId().asString();
         published.setPublicationDate(new Timestamp(new Date().getTime()));
         published.setModificationTime(new Timestamp(new Date().getTime()));
         datasets.add(published);
 
         Dataset unpublished = new Dataset();
-        String unpublishedPid = "doi:10.666/FAKE/unpublished1";
-        unpublished.setGlobalId(new GlobalId(unpublishedPid));
+        unpublished.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/unpublished1", null, DOIServiceBean.DOI_RESOLVER_URL, null));
+        String unpublishedPid = unpublished.getGlobalId().asString();
+
         Timestamp nullPublicationDateToIndicateNotPublished = null;
         unpublished.setPublicationDate(nullPublicationDateToIndicateNotPublished);
         datasets.add(unpublished);
 
         Dataset harvested = new Dataset();
-        String harvestedPid = "doi:10.666/FAKE/harvested1";
-        harvested.setGlobalId(new GlobalId(harvestedPid));
+        harvested.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/harvested1", null, DOIServiceBean.DOI_RESOLVER_URL, null));
+        String harvestedPid = harvested.getGlobalId().asString();
         harvested.setPublicationDate(new Timestamp(new Date().getTime()));
         harvested.setHarvestedFrom(new HarvestingClient());
         datasets.add(harvested);
 
         Dataset deaccessioned = new Dataset();
-        String deaccessionedPid = "doi:10.666/FAKE/harvested1";
-        deaccessioned.setGlobalId(new GlobalId(deaccessionedPid));
+        deaccessioned.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL, "10.666", "FAKE/deaccessioned1", null, DOIServiceBean.DOI_RESOLVER_URL, null));
+        String deaccessionedPid = deaccessioned.getGlobalId().asString();
+
         deaccessioned.setPublicationDate(new Timestamp(new Date().getTime()));
         List<DatasetVersion> datasetVersions = new ArrayList<>();
         DatasetVersion datasetVersion = new DatasetVersion();
@@ -74,39 +98,18 @@ public void testUpdateSiteMap() throws IOException, ParseException {
         datasetVersions.add(datasetVersion);
         deaccessioned.setVersions(datasetVersions);
         datasets.add(deaccessioned);
-
-        Path tmpDirPath = Files.createTempDirectory(null);
-        String tmpDir = tmpDirPath.toString();
-        File docroot = new File(tmpDir + File.separator + "docroot");
-        docroot.mkdirs();
-        System.setProperty("com.sun.aas.instanceRoot", tmpDir);
-
+        
+        // when
         SiteMapUtil.updateSiteMap(dataverses, datasets);
-
-        String pathToTest = tmpDirPath + File.separator + "docroot" + File.separator + "sitemap";
-        String pathToSiteMap = pathToTest + File.separator + "sitemap.xml";
-
-        Exception wellFormedXmlException = null;
-        try {
-            assertTrue(XmlValidator.validateXmlWellFormed(pathToSiteMap));
-        } catch (Exception ex) {
-            System.out.println("Exception caught checking that XML is well formed: " + ex);
-            wellFormedXmlException = ex;
-        }
-        assertNull(wellFormedXmlException);
-
-        Exception notValidAgainstSchemaException = null;
-        try {
-            assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd")));
-        } catch (MalformedURLException | SAXException ex) {
-            System.out.println("Exception caught validating XML against the sitemap schema: " + ex);
-            notValidAgainstSchemaException = ex;
-        }
-        assertNull(notValidAgainstSchemaException);
+        
+        // then
+        String pathToSiteMap = tempDocroot.resolve("sitemap").resolve("sitemap.xml").toString();
+        assertDoesNotThrow(() -> XmlValidator.validateXmlWellFormed(pathToSiteMap));
+        assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd")));
 
         File sitemapFile = new File(pathToSiteMap);
         String sitemapString = XmlPrinter.prettyPrintXml(new String(Files.readAllBytes(Paths.get(sitemapFile.getAbsolutePath()))));
-        System.out.println("sitemap: " + sitemapString);
+        //System.out.println("sitemap: " + sitemapString);
 
         assertTrue(sitemapString.contains("1955-11-12"));
         assertTrue(sitemapString.contains(publishedPid));
@@ -114,8 +117,6 @@ public void testUpdateSiteMap() throws IOException, ParseException {
         assertFalse(sitemapString.contains(harvestedPid));
         assertFalse(sitemapString.contains(deaccessionedPid));
 
-        System.clearProperty("com.sun.aas.instanceRoot");
-
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java b/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java
index b744db5be20..10c07cf0ef5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/userdata/UserListMakerTest.java
@@ -1,6 +1,6 @@
 package edu.harvard.iq.dataverse.userdata;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.mockito.Mockito.mock;
 
 import org.junit.jupiter.api.AfterEach;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java b/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java
index 5aa37e8b05c..475d986b63c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/BitSetTest.java
@@ -7,12 +7,13 @@
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.List;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -27,21 +28,21 @@ enum TestEnum {
 	public BitSetTest() {
 	}
 	
-	@BeforeClass
+	@BeforeAll
 	public static void setUpClass() {
 	}
 	
-	@AfterClass
+	@AfterAll
 	public static void tearDownClass() {
 	}
 	
 	BitSet sut;
-	@Before
+	@BeforeEach
 	public void setUp() {
 		sut = new BitSet();
 	}
 	
-	@After
+	@AfterEach
 	public void tearDown() {
 	}
 
@@ -116,9 +117,9 @@ public void testIntersect() {
 		sut = sut1.copy().intersect(sut2);
 		for ( short i : BitSet.allIndices() ) {
 			if ( sut.isSet(i) ) {
-				assertTrue( "expected true at idx " + i, sut1.isSet(i) && sut2.isSet(i) );
+				assertTrue(sut1.isSet(i) && sut2.isSet(i), "expected true at idx " + i);
 			} else {
-				assertFalse( "expected false at idx " + i, sut1.isSet(i) && sut2.isSet(i) );
+				assertFalse(sut1.isSet(i) && sut2.isSet(i), "expected false at idx " + i);
 			}
 		}
 	}
@@ -133,9 +134,9 @@ public void testXor() {
 		sut = sut1.copy().xor(sut2);
 		for ( short i : BitSet.allIndices() ) {
 			if ( sut.isSet(i) ) {
-				assertTrue( "expected true at idx " + i, sut1.isSet(i) ^ sut2.isSet(i) );
+				assertTrue(sut1.isSet(i) ^ sut2.isSet(i), "expected true at idx " + i);
 			} else {
-				assertFalse( "expected false at idx " + i, sut1.isSet(i) ^ sut2.isSet(i) );
+				assertFalse(sut1.isSet(i) ^ sut2.isSet(i), "expected false at idx " + i);
 			}
 		}
 	}
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java
index 8889d492829..8f788154a93 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/BundleUtilTest.java
@@ -1,11 +1,14 @@
 package edu.harvard.iq.dataverse.util;
 
 import java.util.Arrays;
-import java.util.Locale;
+import java.util.List;
 import java.util.MissingResourceException;
-import java.util.ResourceBundle;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 public class BundleUtilTest {
 
@@ -20,7 +23,7 @@ public void testGetStringFromBundle() {
 
     @Test
     public void testGetStringFromBundleWithArguments() {
-        assertEquals(null, BundleUtil.getStringFromBundle(null, null));
+        assertEquals(null, BundleUtil.getStringFromBundle(null, (List<String>) null));
         String actual = BundleUtil.getStringFromBundle("dataverse.create.success", Arrays.asList("http://guides.dataverse.org/en", "4.0"));
         String expected = "You have successfully created your dataverse! To learn more about what you can do with your dataverse, check out the <a href=\"http://guides.dataverse.org/en/4.0/user/dataverse-management.html\" title=\"Dataverse Management - Dataverse User Guide\" target=\"_blank\">User Guide</a>.";
         assertEquals(expected, actual);
@@ -78,14 +81,14 @@ public void testStringFromPropertyFile() {
     }
 
     //To assure that the MissingResourceException bubble up from this call
-    @Test(expected = MissingResourceException.class)
-    public void testStringFromPropertyFileException() {
-        BundleUtil.getStringFromPropertyFile("FAKE","MimeTypeFacets");
+    @Test
+    void testStringFromPropertyFileException() {
+        assertThrows(MissingResourceException.class, () -> BundleUtil.getStringFromPropertyFile("FAKE","MimeTypeFacets"));
     }
     
     //To assure MissingResourceException is caught when calling normal bundle calls
     @Test
-    public void testNoErrorNonExistentStringBundle() {
-        BundleUtil.getStringFromBundle("FAKE", null, BundleUtil.getResourceBundle("MimeTypeFacets")); 
+    void testNoErrorNonExistentStringBundle() {
+        assertDoesNotThrow(() -> BundleUtil.getStringFromBundle("FAKE", null, BundleUtil.getResourceBundle("MimeTypeFacets")));
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java
index 0ad76832952..84a200542fe 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FileSortFieldAndOrderTest.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
 import edu.harvard.iq.dataverse.search.SortBy;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class FileSortFieldAndOrderTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
index 1d481f18cf5..2cfe9f25d7e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java
@@ -8,10 +8,6 @@
 import edu.harvard.iq.dataverse.FileMetadata;
 import edu.harvard.iq.dataverse.Guestbook;
 import edu.harvard.iq.dataverse.TermsOfUseAndAccess;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import edu.harvard.iq.dataverse.*;
-import edu.harvard.iq.dataverse.api.UtilIT;
 import edu.harvard.iq.dataverse.license.License;
 import edu.harvard.iq.dataverse.util.FileUtil.FileCitationExtension;
 
@@ -19,110 +15,76 @@
 import java.io.IOException;
 import java.time.LocalDate;
 import java.net.URI;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.logging.Level;
 import java.util.logging.Logger;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.fail;
-
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Enclosed.class)
-public class FileUtilTest {
-
-    @RunWith(Parameterized.class)
-    public static class FileUtilParamTest {
-
-        @Parameters
-        public static Collection data() {
-            return Arrays.asList(new Object[][] {
-                { null, null, null },
-
-                { "trees.png-endnote.xml", "trees.png", FileUtil.FileCitationExtension.ENDNOTE },
-                { "trees.png.ris", "trees.png", FileUtil.FileCitationExtension.RIS },
-                { "trees.png.bib", "trees.png", FileUtil.FileCitationExtension.BIBTEX },
-                { null, "trees.png", null },
+import java.util.stream.Stream;
 
-                { "50by1000-endnote.xml", "50by1000.tab", FileUtil.FileCitationExtension.ENDNOTE },
-                { "50by1000.ris", "50by1000.tab", FileUtil.FileCitationExtension.RIS },
-                { "50by1000.bib", "50by1000.tab", FileUtil.FileCitationExtension.BIBTEX }
-            });
-        }
-
-        @Parameter
-        public String expectedFileName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
-        @Parameter(1)
-        public String actualFileName;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
-        @Parameter(2)
-        public FileCitationExtension citationExtension;
-
-        @Test
-        public void testGetCiteDataFileFilename() {
-            assertEquals(expectedFileName, FileUtil.getCiteDataFileFilename(actualFileName, citationExtension));
-        }
+public class FileUtilTest {
+    
+    static Stream<Arguments> dataFilenames() {
+        return Stream.of(
+            Arguments.of(null, null, null),
+            Arguments.of("trees.png-endnote.xml", "trees.png", FileUtil.FileCitationExtension.ENDNOTE),
+            Arguments.of("trees.png.ris", "trees.png", FileUtil.FileCitationExtension.RIS),
+            Arguments.of("trees.png.bib", "trees.png", FileUtil.FileCitationExtension.BIBTEX),
+            Arguments.of(null, "trees.png", null),
+            Arguments.of("50by1000-endnote.xml", "50by1000.tab", FileUtil.FileCitationExtension.ENDNOTE),
+            Arguments.of("50by1000.ris", "50by1000.tab", FileUtil.FileCitationExtension.RIS),
+            Arguments.of("50by1000.bib", "50by1000.tab", FileUtil.FileCitationExtension.BIBTEX)
+        );
     }
-
-    @RunWith(Parameterized.class)
-    public static class FileUtilParamTest2 {
-
-        @Parameter
-        public String expectedString;
-
-        @Parameter(1)
-        public String originalName;
-
-        @Parameter(2)
-        public String newExtension;
-
-        @Parameters
-        public static Collection data() {
-            return Arrays.asList(new Object[][] {
-                // functional approach: what should the method do
-                // replace no extension with an empty extension
-                { "no-extension.", "no-extension", ""},
-
-                // replace extension x with same extension
-                { "extension.x", "extension.x", "x" },
-
-                // replace extension x with another extension y
-                { "extension.y", "extension.x", "y" },
-
-                // interface approach: what are possible inputs
-                // will not pass as null is not handled
-                //{ null, null, null },
-                //{ null, null, "" },
-                //{ null, null, "y" },
-
-                { ".null", "", null },
-                { ".", "", "" },
-                { ".y", "", "y" },
-            });
-        }
-
-        @Test
-        public void testReplaceExtension() {
-            assertEquals(expectedString, FileUtil.replaceExtension(originalName, newExtension));
-        }
-
+    
+    @ParameterizedTest
+    @MethodSource("dataFilenames")
+    void testGetCiteDataFileFilename(String expectedFileName, String actualFileName, FileCitationExtension citationExtension) {
+        assertEquals(expectedFileName, FileUtil.getCiteDataFileFilename(actualFileName, citationExtension));
+    }
+    
+    static Stream<Arguments> dataReplaceNames() {
+        return Stream.of(
+            // functional approach: what should the method do
+            // replace no extension with an empty extension
+            Arguments.of("no-extension.", "no-extension", ""),
+        
+            // replace extension x with same extension
+            Arguments.of("extension.x", "extension.x", "x"),
+        
+            // replace extension x with another extension y
+            Arguments.of("extension.y", "extension.x", "y"),
+        
+            // interface approach: what are possible inputs
+            // will not pass as null is not handled
+            //Arguments.of(null, null, null),
+            //Arguments.of(null, null, ""),
+            //Arguments.of(null, null, "y"),
+            
+            Arguments.of(".null", "", null),
+            Arguments.of(".", "", ""),
+            Arguments.of(".y", "", "y")
+        );
+    }
+    
+    @ParameterizedTest
+    @MethodSource("dataReplaceNames")
+    void testReplaceExtension(String expectedString, String originalName, String newExtension) {
+        assertEquals(expectedString, FileUtil.replaceExtension(originalName, newExtension));
     }
 
-    public static class FileUtilNoParamTest {
+    static class FileUtilNoParamTest {
         @Test
         public void testIsDownloadPopupRequiredNull() {
-            assertEquals(false, FileUtil.isDownloadPopupRequired(null));
+            assertFalse(FileUtil.isDownloadPopupRequired(null));
         }
 
         @Test
@@ -130,7 +92,7 @@ public void testIsDownloadPopupRequiredDraft() {
             Dataset dataset = new Dataset();
             DatasetVersion dsv1 = dataset.getOrCreateEditVersion();
             assertEquals(DatasetVersion.VersionState.DRAFT, dsv1.getVersionState());
-            assertEquals(false, FileUtil.isDownloadPopupRequired(dsv1));
+            assertFalse(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -142,7 +104,7 @@ public void testIsDownloadPopupRequiredLicenseCC0() {
             license.setDefault(true);
             termsOfUseAndAccess.setLicense(license);
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(false, FileUtil.isDownloadPopupRequired(dsv1));
+            assertFalse(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -160,7 +122,7 @@ public void testIsDownloadPopupRequiredHasTermsOfUseAndCc0License() {
             termsOfUseAndAccess.setLicense(license);
             termsOfUseAndAccess.setTermsOfUse("be excellent to each other");
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(false, FileUtil.isDownloadPopupRequired(dsv1));
+            assertFalse(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -171,7 +133,7 @@ public void testIsDownloadPopupRequiredHasTermsOfUseAndNoneLicense() {
             termsOfUseAndAccess.setLicense(null);
             termsOfUseAndAccess.setTermsOfUse("be excellent to each other");
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(true, FileUtil.isDownloadPopupRequired(dsv1));
+            assertTrue(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -181,7 +143,7 @@ public void testIsDownloadPopupRequiredHasTermsOfAccess() {
             TermsOfUseAndAccess termsOfUseAndAccess = new TermsOfUseAndAccess();
             termsOfUseAndAccess.setTermsOfAccess("Terms of *Access* is different than Terms of Use");
             dsv1.setTermsOfUseAndAccess(termsOfUseAndAccess);
-            assertEquals(true, FileUtil.isDownloadPopupRequired(dsv1));
+            assertTrue(FileUtil.isDownloadPopupRequired(dsv1));
         }
 
         @Test
@@ -195,17 +157,17 @@ public void testIsDownloadPopupRequiredHasGuestBook() {
             dataset.setGuestbook(guestbook);
             Dataverse dataverse = new Dataverse();
             guestbook.setDataverse(dataverse);
-            assertEquals(true, FileUtil.isDownloadPopupRequired(datasetVersion));
+            assertTrue(FileUtil.isDownloadPopupRequired(datasetVersion));
         }
 
         @Test
         public void testIsPubliclyDownloadable() {
-            assertEquals(false, FileUtil.isPubliclyDownloadable(null));
+            assertFalse(FileUtil.isPubliclyDownloadable(null));
 
             FileMetadata restrictedFileMetadata = new FileMetadata();
             restrictedFileMetadata.setRestricted(true);
             restrictedFileMetadata.setDataFile(new DataFile());
-            assertEquals(false, FileUtil.isPubliclyDownloadable(restrictedFileMetadata));
+            assertFalse(FileUtil.isPubliclyDownloadable(restrictedFileMetadata));
 
             FileMetadata nonRestrictedFileMetadata = new FileMetadata();
             nonRestrictedFileMetadata.setDataFile(new DataFile());
@@ -215,7 +177,7 @@ public void testIsPubliclyDownloadable() {
             Dataset dataset = new Dataset();
             dsv.setDataset(dataset);
             nonRestrictedFileMetadata.setRestricted(false);
-            assertEquals(true, FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
+            assertTrue(FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
         }
 
         @Test
@@ -232,7 +194,7 @@ public void testIsPubliclyDownloadable2() {
             Dataset dataset = new Dataset();
             dsv.setDataset(dataset);
             nonRestrictedFileMetadata.setRestricted(false);
-            assertEquals(false, FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
+            assertFalse(FileUtil.isPubliclyDownloadable(nonRestrictedFileMetadata));
         }
 
         @Test
@@ -250,7 +212,7 @@ public void testIsPubliclyDownloadable3() {
             Dataset dataset = new Dataset();
             dsv.setDataset(dataset);
             embargoedFileMetadata.setRestricted(false);
-            assertEquals(false, FileUtil.isPubliclyDownloadable(embargoedFileMetadata));
+            assertFalse(FileUtil.isPubliclyDownloadable(embargoedFileMetadata));
         }
 
         @Test
@@ -272,7 +234,7 @@ public void testgetFileDownloadUrl() {
 
         @Test
         public void testGetPublicDownloadUrl() {
-            assertEquals(null, FileUtil.getPublicDownloadUrl(null, null, null));
+            assertNull(FileUtil.getPublicDownloadUrl(null, null, null));
             assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP", 33L)); //pid before fileId
             assertEquals("https://demo.dataverse.org/api/access/datafile/:persistentId?persistentId=doi:10.5072/FK2/TLU3EP", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", "doi:10.5072/FK2/TLU3EP", null));
             assertEquals("https://demo.dataverse.org/api/access/datafile/33", FileUtil.getPublicDownloadUrl("https://demo.dataverse.org", null, 33L)); //pid before fileId
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/FirstNameTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java
similarity index 92%
rename from src/test/java/edu/harvard/iq/dataverse/export/FirstNameTest.java
rename to src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java
index 1b935b0406e..1925094a2d8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/FirstNameTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/FirstNameTest.java
@@ -1,8 +1,8 @@
-package edu.harvard.iq.dataverse.export;
+package edu.harvard.iq.dataverse.util;
 
-import edu.harvard.iq.dataverse.export.openaire.FirstNames;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java
index 88a8d24c772..879c4bc1c4e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/JhoveFileTypeTest.java
@@ -5,10 +5,10 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.io.FileUtils;
-import org.junit.AfterClass;
-import static org.junit.Assert.assertEquals;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 
 public class JhoveFileTypeTest {
 
@@ -24,7 +24,7 @@ public class JhoveFileTypeTest {
     static File ico;
     static File ipynb;
 
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
         System.setProperty("com.sun.aas.instanceRoot", baseDirForConfigFiles);
         jhoveFileType = new JhoveFileType();
@@ -41,7 +41,7 @@ public static void setUpClass() {
         ipynb = new File("src/test/java/edu/harvard/iq/dataverse/util/irc-metrics.ipynb");
     }
 
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
         // SiteMapUtilTest relies on com.sun.aas.instanceRoot being null.
         System.clearProperty("com.sun.aas.instanceRoot");
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java b/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java
index 545d3b1a31f..316b9c3a2aa 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/JsfHelperTest.java
@@ -4,72 +4,53 @@
 
 package edu.harvard.iq.dataverse.util;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import java.util.Arrays;
-import java.util.Collection;
+import java.util.stream.Stream;
 
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
-/**
- *
- * @author michael
- */
-@RunWith(Parameterized.class)
 public class JsfHelperTest {
 	
 	enum TestEnum { Lorem, Ipsum, Dolor, Sit, Amet }
 	
-	@BeforeClass
+	@BeforeAll
 	public static void setUpClass() {
 	}
 	
-	@AfterClass
+	@AfterAll
 	public static void tearDownClass() {
 	}
 	
-	@Before
+	@BeforeEach
 	public void setUp() {
 	}
 	
-	@After
+	@AfterEach
 	public void tearDown() {
 	}
-
-	public TestEnum inputEnum;
-	public String inputString;
-	public TestEnum defaultEnumValue;
 	
-	public JsfHelperTest(TestEnum inputEnum, String inputString, TestEnum defaultEnumValue) {
-		this.inputEnum = inputEnum;
-		this.inputString = inputString;
-		this.defaultEnumValue = defaultEnumValue;
-	}
-
-	@Parameters
-	public static Collection<Object[]> parameters() {
-		return Arrays.asList (
-			new Object[][] {
-				{ TestEnum.Lorem, "Lorem", TestEnum.Dolor },
-				{ TestEnum.Lorem, "Lorem   ", TestEnum.Dolor },
-				{ TestEnum.Dolor, null, TestEnum.Dolor },
-				{ TestEnum.Dolor, "THIS IS A BAD VALUE", TestEnum.Dolor },
-			}
+	static Stream<Arguments> parameters() {
+		return Stream.of(
+			Arguments.of(TestEnum.Lorem, "Lorem", TestEnum.Dolor),
+			Arguments.of(TestEnum.Lorem, "Lorem   ", TestEnum.Dolor),
+			Arguments.of(TestEnum.Dolor, null, TestEnum.Dolor),
+			Arguments.of(TestEnum.Dolor, "THIS IS A BAD VALUE", TestEnum.Dolor )
 		);
 	}
 
 	/**
 	 * Test of enumValue method, of class JsfHelper.
 	 */
-	@Test
-	public void testEnumValue() {
+	@ParameterizedTest
+	@MethodSource("parameters")
+	public void testEnumValue(TestEnum inputEnum, String inputString, TestEnum defaultEnumValue) {
 		System.out.println("enumValue");
 		JsfHelper instance = new JsfHelper();
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java b/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java
index b7ecff4ed8b..697e4853c3f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/LruCacheTest.java
@@ -1,9 +1,10 @@
 package edu.harvard.iq.dataverse.util;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
@@ -13,12 +14,12 @@ public class LruCacheTest {
     
     LruCache<Long, String> sut;
     
-    @Before
+    @BeforeEach
     public void setUp() {
         sut = new LruCache<>();
     }
     
-    @After
+    @AfterEach
     public void tearDown() {
         sut = null;
     }
@@ -65,9 +66,9 @@ public void testLRU() {
         
     }
 
-    @Test(expected = IllegalArgumentException.class)
-    public void testSetMaxSizeWithException() {
-        sut.setMaxSize(0l);
+    @Test
+    void testSetMaxSizeWithException() {
+        assertThrows(IllegalArgumentException.class, () -> sut.setMaxSize(0l));
     }
 
     @Test
@@ -75,14 +76,14 @@ public void testInvalidateWholeCache() {
         // prepare cache
         Long key = 0l;
         String value = "x";
-        assertEquals("put value", value, sut.put(key, value));
-        assertEquals("get value", value, sut.get(key));
+        assertEquals(value, sut.put(key, value), "put value");
+        assertEquals(value, sut.get(key), "get value");
 
         // invalidate cache
         sut.invalidate();
 
         // verify invalidation
-        assertEquals("verify that value is no longer here", null, sut.get(key));
+        assertNull(sut.get(key), "verify that value is no longer here");
     }
 
     @Test
@@ -90,19 +91,19 @@ public void testInvalidateOneKeyOfCache() {
         // prepare cache
         Long key1 = 0l;
         String value1 = "x";
-        assertEquals("put value 1", value1, sut.put(key1, value1));
-        assertEquals("get value 1", value1, sut.get(key1));
+        assertEquals(value1, sut.put(key1, value1), "put value 1");
+        assertEquals(value1, sut.get(key1), "get value 1");
 
         Long key2 = 1l;
         String value2 = "y";
-        assertEquals("put value 2", value2, sut.put(key2, value2));
-        assertEquals("get value 2", value2, sut.get(key2));
+        assertEquals(value2, sut.put(key2, value2), "put value 2");
+        assertEquals(value2, sut.get(key2), "get value 2");
 
         // invalidate cache
         sut.invalidate(key1);
 
         // verify invalidation
-        assertEquals("verify that value 1 is no longer here", null, sut.get(key1));
-        assertEquals("verify that value 2 still exists", value2, sut.get(key2));
+        assertNull(sut.get(key1), "verify that value 1 is no longer here");
+        assertEquals(value2, sut.get(key2), "verify that value 2 still exists");
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
index 76e447b3faa..205b1f0bfcf 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/MailUtilTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import org.junit.jupiter.api.Order;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestMethodOrder;
@@ -14,7 +14,7 @@
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.junit.jupiter.MockitoExtension;
-import org.junit.Before;
+import org.junit.jupiter.api.BeforeEach;
 
 @ExtendWith(MockitoExtension.class)
 @TestMethodOrder(OrderAnnotation.class)
@@ -27,7 +27,7 @@ public class MailUtilTest {
     @Mock
     SettingsServiceBean settingsSvc;
     
-    @Before
+    @BeforeEach
     public void setUp() {
         userNotification = new UserNotification();
 
@@ -80,7 +80,7 @@ public void testSubjectRevokeRole() {
     @Test
     public void testSubjectRequestFileAccess() {
         userNotification.setType(UserNotification.Type.REQUESTFILEACCESS);
-        assertEquals("LibraScholar: Access has been requested for a restricted file", MailUtil.getSubjectTextBasedOnNotification(userNotification, null));
+        assertEquals("LibraScholar: Access has been requested for a restricted file in dataset \"\"", MailUtil.getSubjectTextBasedOnNotification(userNotification, null));
     }
     
     @Test
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java b/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java
index 0584c6161f1..8d5b4940c14 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/MockResponse.java
@@ -7,14 +7,14 @@
 import java.util.Map;
 import java.util.Set;
 import static java.util.stream.Collectors.toList;
-import javax.ws.rs.core.EntityTag;
-import javax.ws.rs.core.GenericType;
-import javax.ws.rs.core.Link;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedHashMap;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.NewCookie;
-import javax.ws.rs.core.Response;
+import jakarta.ws.rs.core.EntityTag;
+import jakarta.ws.rs.core.GenericType;
+import jakarta.ws.rs.core.Link;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.MultivaluedHashMap;
+import jakarta.ws.rs.core.MultivaluedMap;
+import jakarta.ws.rs.core.NewCookie;
+import jakarta.ws.rs.core.Response;
 
 /**
  * Simple mock class for HTTP response. This is needed as the normal response builders
diff --git a/src/test/java/edu/harvard/iq/dataverse/export/OrganizationsTest.java b/src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java
similarity index 94%
rename from src/test/java/edu/harvard/iq/dataverse/export/OrganizationsTest.java
rename to src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java
index 2552e217cef..fb6a8e14fcd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/export/OrganizationsTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/OrganizationsTest.java
@@ -1,8 +1,8 @@
-package edu.harvard.iq.dataverse.export;
+package edu.harvard.iq.dataverse.util;
 
-import edu.harvard.iq.dataverse.export.openaire.Organizations;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java
index b22f18ca787..c3d9fd8fcd3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/PersonOrOrgUtilTest.java
@@ -1,13 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import edu.harvard.iq.dataverse.export.openaire.Organizations;
 import edu.harvard.iq.dataverse.util.json.JsonUtil;
 
-import org.junit.Ignore;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
 
-import javax.json.JsonObject;
+import jakarta.json.JsonObject;
 
 public class PersonOrOrgUtilTest {
 
@@ -59,7 +58,7 @@ public void testOrganizationCommaOrDash() {
             verifyIsOrganization("Geographic Data Technology, Inc. (GDT)");
         }
 
-        @Ignore
+        @Disabled
         @Test
         public void testOrganizationES() {
             //Spanish recognition is not enabled - see export/Organization.java
@@ -86,6 +85,10 @@ public void testName() {
             verifyIsPerson("Francesco", "Francesco", null);
             // test only family name
             verifyIsPerson("Cadili", null, null);
+            
+            verifyIsPerson("kcjim11, kcjim11", "kcjim11", "kcjim11");
+            
+            verifyIsPerson("Bartholomew 3, James", "James", "Bartholomew 3");
         }
         
         private void verifyIsOrganization(String fullName) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java
index ff505d2ff09..b00b4afca7d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/StringUtilTest.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.util;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -10,267 +10,134 @@
 import java.util.Optional;
 import java.util.stream.Stream;
 
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.CsvSource;
+import org.junit.jupiter.params.provider.MethodSource;
 
-
-/**
- *
- * @author michael
- */
-@RunWith(Enclosed.class)
-public class StringUtilTest {
-
-    public StringUtilTest() {
-    }
+class StringUtilTest {
     
-    @BeforeClass
-    public static void setUpClass() {
+    /**
+     * Test of isEmpty method, of class StringUtil.
+     */
+    @ParameterizedTest
+    @CsvSource(value = {
+        "false, a",
+        "true, NULL",
+        "true, ''",
+        "true, ' '",
+        "true, \t",
+        "true, \t \t \n"
+    }, nullValues = "NULL")
+    void testIsEmpty(boolean isValid, String inputString) {
+        assertEquals( isValid, StringUtil.isEmpty(inputString) );
     }
     
-    @AfterClass
-    public static void tearDownClass() {
+    /**
+     * Test of isAlphaNumeric method, of class StringUtil.
+     */
+    @ParameterizedTest
+    @CsvSource({
+        "true,abc",
+        "true,1230",
+        "true,1230abc",
+        "true,1230abcABC",
+        "false,1230abcABC#"
+    })
+    void testIsAlphaNumeric(boolean isValid, String inputString) {
+        assertEquals(isValid, StringUtil.isAlphaNumeric(inputString) );
     }
     
-    @Before
-    public void setUp() {
+    /**
+     * Test of isAlphaNumericChar method, of class StringUtil.
+     */
+    @ParameterizedTest
+    @CsvSource({
+        "true,'a'",
+        "true,'f'",
+        "true,'z'",
+        "true,'0'",
+        "true,'1'",
+        "true,'9'",
+        "true,'A'",
+        "true,'G'",
+        "true,'Z'",
+        "false,'@'"
+    })
+    void testIsAlphaNumericChar(boolean isValid, char inputChar) {
+        assertEquals(isValid, StringUtil.isAlphaNumericChar(inputChar) );
     }
     
-    @After
-    public void tearDown() {
-    }
-
-    @RunWith(Parameterized.class)
-    public static class TestIsEmpty {
-
-        public boolean isValid;
-        public String inputString;
+    @ParameterizedTest
+    @CsvSource(value = {
+        // interface-based partitioning
+        "NULL, NULL, NULL",
+        "NULL, '', NULL",
+        "NULL, d, NULL",
         
-        public TestIsEmpty(boolean isValid, String inputString) {
-            this.isValid = isValid;
-            this.inputString = inputString;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { true, null },
-                        { true, "" },
-                        { true, " " },
-                        { true, "\t" },
-                        { true, "\t \t \n" },
-                        { false, "a" },
-                    }
-            );
-        }
-
-        /**
-         * Test of isEmpty method, of class StringUtil.
-         */
-        @Test
-        public void testIsEmpty() {
-            assertEquals( isValid, StringUtil.isEmpty(inputString) );
-        }
-    }
-
-    @RunWith(Parameterized.class)
-    public static class TestIsAlphaNumeric {
-
-        public boolean isValid;
-        public String inputString;
+        "'', NULL, ''",
+        "'', '', ''",
+        "'', abcdfg, ''",
         
-        public TestIsAlphaNumeric(boolean isValid, String inputString) {
-            this.isValid = isValid;
-            this.inputString = inputString;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { true, "abc" },
-                        { true, "1230" },
-                        { true, "1230abc" },
-                        { true, "1230abcABC" },
-                        { false, "1230abcABC#" },
-                    }
-            );
-        }
-
-        /**
-         * Test of isAlphaNumeric method, of class StringUtil.
-         */
-        @Test
-        public void testIsAlphaNumeric() {
-            assertEquals( isValid, StringUtil.isAlphaNumeric(inputString) );
-        }
-    }
-
-    @RunWith(Parameterized.class)
-    public static class TestIsAlphaNumericChar {
-
-        public boolean isValid;
-        public char inputChar;
+        "abcdfg, NULL, ''",
+        "abcdfg, '', ''",
+        "abcdfg, d, dfg",
         
-        public TestIsAlphaNumericChar(boolean isValid, char inputChar) {
-            this.isValid = isValid;
-            this.inputChar = inputChar;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        { true, 'a' },
-                        { true, 'f' },
-                        { true, 'z' },
-                        { true, '0' },
-                        { true, '1' },
-                        { true, '9' },
-                        { true, 'A' },
-                        { true, 'G' },
-                        { true, 'Z' },
-                        { false, '@' },
-                    }
-            );
-        }
-
-        /**
-         * Test of isAlphaNumericChar method, of class StringUtil.
-         */
-        @Test
-        public void testIsAlphaNumericChar() {
-            assertEquals( isValid, StringUtil.isAlphaNumericChar(inputChar) );
-        }
+        // functionality-based partitioning
+        "abcdfg, NULL, ''",
+        "abcdfg, h, ''",
+        "abcdfg, b, bcdfg"
+    }, nullValues = "NULL")
+    void testSubstringIncludingLast(String str, String separator, String expectedString) {
+        assertEquals( expectedString, StringUtil.substringIncludingLast(str, separator) );
     }
 
-    @RunWith(Parameterized.class)
-    public static class TestSubstringIncludingLast {
-
-        public String str;
-        public String separator;
-        public String expectedString;
-        
-        public TestSubstringIncludingLast(String str, String separator, String expectedString) {
-            this.str = str;
-            this.separator = separator;
-            this.expectedString = expectedString;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        // interface-based partitioning
-                        {null, null, null},
-                        {null, "", null},
-                        {null, "d", null},
-
-                        {"", null, ""},
-                        {"", "", ""},
-                        {"", "abcdfg", ""},
-
-                        {"abcdfg", null, ""},
-                        {"abcdfg", "", ""},
-                        {"abcdfg", "d", "dfg"},
-
-                        // functionality-based partitioning
-                        {"abcdfg" , null, ""},
-                        {"abcdfg", "h", ""},
-                        {"abcdfg", "b", "bcdfg"},
-                    }
-            );
-        }
-
-        @Test
-        public void testSubstringIncludingLast() {
-            assertEquals( expectedString, StringUtil.substringIncludingLast(str, separator) );
-        }
+    static Stream<Arguments> toOptionData() {
+        return Stream.of(
+            Arguments.of(Optional.empty(), null),
+            Arguments.of(Optional.empty(), ""),
+            Arguments.of(Optional.of("leadingWhitespace"), "    leadingWhitespace"),
+            Arguments.of(Optional.of("trailingWhiteSpace"), "trailingWhiteSpace    "),
+            Arguments.of(Optional.of("someString"), "someString"),
+            Arguments.of(Optional.of("some string with spaces"), "some string with spaces")
+        );
     }
-
-    @RunWith(Parameterized.class)
-    public static class TestToOption {
-
-        public String inputString;
-        public Optional<String> expected;
-
-        public TestToOption(String inputString, Optional<String> expected) {
-            this.inputString = inputString;
-            this.expected = expected;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        {null, Optional.empty()},
-                        {"", Optional.empty()},
-                        {"    leadingWhitespace", Optional.of("leadingWhitespace")},
-                        {"trailingWhiteSpace    ", Optional.of("trailingWhiteSpace")},
-                        {"someString", Optional.of("someString")},
-                        {"some string with spaces", Optional.of("some string with spaces")}
-                    }
-            );
-        }
-
-        @Test
-        public void testToOption() {
-            assertEquals(expected, StringUtil.toOption(inputString));
-        }
+    
+    @ParameterizedTest
+    @MethodSource("toOptionData")
+    void testToOption(Optional<String> expected, String inputString) {
+        assertEquals(expected, StringUtil.toOption(inputString));
     }
-
-    @RunWith(Parameterized.class)
-    public static class TestSanitizeFileDirectory {
-
-        public String inputString;
-        public String expected;
-        public boolean aggressively;
-
-        public TestSanitizeFileDirectory(String inputString, String expected, boolean aggressively) {
-            this.inputString = inputString;
-            this.expected = expected;
-            this.aggressively = aggressively;
-        }
-
-        @Parameters
-        public static Collection<Object[]> parameters() {
-            return Arrays.asList(
-                    new Object[][] { 
-                        {"some\\path\\to\\a\\directory", "some/path/to/a/directory", false},
-                        {"some\\//path\\//to\\//a\\//directory", "some/path/to/a/directory", false},
-                        // starts with / or - or . or whitepsace
-                        {"/some/path/to/a/directory", "some/path/to/a/directory", false},
-                        {"-some/path/to/a/directory", "some/path/to/a/directory", false},
-                        {".some/path/to/a/directory", "some/path/to/a/directory", false},
-                        {" some/path/to/a/directory", "some/path/to/a/directory", false},
-                        // ends with / or - or . or whitepsace
-                        {"some/path/to/a/directory/", "some/path/to/a/directory", false},
-                        {"some/path/to/a/directory-", "some/path/to/a/directory", false},
-                        {"some/path/to/a/directory.", "some/path/to/a/directory", false},
-                        {"some/path/to/a/directory ", "some/path/to/a/directory", false},
-
-                        {"", null, false},
-                        {"/", null, false},
-
-                        // aggressively
-                        {"some/path/to/a/dire{`~}ctory", "some/path/to/a/dire.ctory", true},
-                        {"some/path/to/a/directory\\.\\.", "some/path/to/a/directory", true},
-                    }
-            );
-        }
-
-        @Test
-        public void testSanitizeFileDirectory() {
-            assertEquals(expected, StringUtil.sanitizeFileDirectory(inputString, aggressively));
-        }
+    
+    static Stream<Arguments> sanitizeData() {
+        return Stream.of(
+            Arguments.of("some\\path\\to\\a\\directory", "some/path/to/a/directory", false),
+            Arguments.of("some\\//path\\//to\\//a\\//directory", "some/path/to/a/directory", false),
+            // starts with / or - or . or whitespace
+            Arguments.of("/some/path/to/a/directory", "some/path/to/a/directory", false),
+            Arguments.of("-some/path/to/a/directory", "some/path/to/a/directory", false),
+            Arguments.of(".some/path/to/a/directory", "some/path/to/a/directory", false),
+            Arguments.of(" some/path/to/a/directory", "some/path/to/a/directory", false),
+            // ends with / or - or . or whitespace
+            Arguments.of("some/path/to/a/directory/", "some/path/to/a/directory", false),
+            Arguments.of("some/path/to/a/directory-", "some/path/to/a/directory", false),
+            Arguments.of("some/path/to/a/directory.", "some/path/to/a/directory", false),
+            Arguments.of("some/path/to/a/directory ", "some/path/to/a/directory", false),
+            
+            Arguments.of("", null, false),
+            Arguments.of("/", null, false),
+            
+            // aggressively
+            Arguments.of("some/path/to/a/dire{`~}ctory", "some/path/to/a/dire.ctory", true),
+            Arguments.of("some/path/to/a/directory\\.\\.", "some/path/to/a/directory", true)
+        );
+    }
+    
+    @ParameterizedTest
+    @MethodSource("sanitizeData")
+    void testSanitizeFileDirectory(String inputString, String expected, boolean aggressively) {
+        assertEquals(expected, StringUtil.sanitizeFileDirectory(inputString, aggressively));
     }
 
     public static class StringUtilNoParamTest{
@@ -354,5 +221,23 @@ public void testNonEmpty_emptyString() {
             String expected = "";
             assertFalse(StringUtil.nonEmpty(expected));
         }
+        
+        /**
+         * Full name or organization name cleanup.
+         *
+         * A name is composed of: &lt;First Names&gt; &lt;Family Name&gt;
+         *
+         * @author francesco.cadili@4science.it
+         */
+        @Test
+        public void testNormalize() {
+            assertEquals("Francesco", StringUtil.normalize("    Francesco    "));
+            assertEquals("Francesco Cadili", StringUtil.normalize("Francesco  Cadili "));
+            assertEquals("Cadili, Francesco", StringUtil.normalize("  Cadili,Francesco"));
+            assertEquals("Cadili, Francesco", StringUtil.normalize("Cadili,     Francesco  "));
+            assertEquals("", StringUtil.normalize(null));
+
+            // TODO: organization examples...
+        }
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java
index 2806aa3aa9b..82b89bca678 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/SystemConfigTest.java
@@ -3,6 +3,7 @@
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.junit.jupiter.params.ParameterizedTest;
@@ -15,6 +16,7 @@
 import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.doReturn;
 
+@LocalJvmSettings
 @ExtendWith(MockitoExtension.class)
 class SystemConfigTest {
     
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java
index 2b9d507758f..09739b67023 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlSignerUtilTest.java
@@ -1,12 +1,12 @@
 package edu.harvard.iq.dataverse.util;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class UrlSignerUtilTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java
index 236179bdb12..d70a108e7c6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/UrlTokenUtilTest.java
@@ -1,5 +1,6 @@
 package edu.harvard.iq.dataverse.util;
 
+import edu.harvard.iq.dataverse.DOIServiceBean;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.DatasetVersion;
@@ -8,6 +9,7 @@
 import edu.harvard.iq.dataverse.authorization.users.ApiToken;
 import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
@@ -15,6 +17,7 @@
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
+@LocalJvmSettings
 class UrlTokenUtilTest {
 
     @Test
@@ -29,7 +32,7 @@ void testGetToolUrlWithOptionalQueryParameters() {
         DatasetVersion dv = new DatasetVersion();
         Dataset ds = new Dataset();
         ds.setId(50L);
-        ds.setGlobalId(new GlobalId("doi:10.5072/FK2ABCDEF"));
+        ds.setGlobalId(new GlobalId(DOIServiceBean.DOI_PROTOCOL,"10.5072","FK2ABCDEF",null, DOIServiceBean.DOI_RESOLVER_URL, null));
         dv.setDataset(ds);
         fmd.setDatasetVersion(dv);
         List<FileMetadata> fmdl = new ArrayList<>();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java
index 2dfaf2b2371..905f8609276 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagChecksumTypeTest.java
@@ -3,7 +3,7 @@
 import org.apache.commons.io.IOUtils;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Map;
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java
index 4f7a22833e7..87b3382120d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidationTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.util.bagit.BagValidation.FileValidationResult;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.nio.file.Path;
 import java.util.Optional;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java
index 24c45a8e75c..cbdb4b66e83 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/BagValidatorTest.java
@@ -8,8 +8,8 @@
 import org.apache.commons.lang3.RandomStringUtils;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.nio.file.Path;
@@ -33,7 +33,7 @@ public class BagValidatorTest {
     private ManifestReader manifestReader;
     private BagValidator target;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         manifestReader = Mockito.mock(ManifestReader.class);
         target = Mockito.spy(new BagValidator(manifestReader));
@@ -46,7 +46,7 @@ public void hasBagItPackage_should_return_false_when_bagit_file_not_found() {
         boolean result = target.hasBagItPackage(fileDataProvider);
 
         MatcherAssert.assertThat(result, Matchers.is(false));
-        Mockito.verifyZeroInteractions(manifestReader);
+        Mockito.verifyNoInteractions(manifestReader);
     }
 
     @Test
@@ -95,7 +95,7 @@ public void validateChecksums_should_return_error_when_no_bagit_file_in_data_pro
         MatcherAssert.assertThat(result.getErrorMessage().isEmpty(), Matchers.is(false));
         Mockito.verify(target).getMessage(Mockito.eq("bagit.validation.bag.file.not.found"), Mockito.any());
 
-        Mockito.verifyZeroInteractions(manifestReader);
+        Mockito.verifyNoInteractions(manifestReader);
     }
 
     @Test
@@ -119,7 +119,7 @@ public void validateChecksums_should_return_error_when_manifest_reader_returns_e
 
         MatcherAssert.assertThat(result.success(), Matchers.is(false));
         MatcherAssert.assertThat(result.getErrorMessage().isEmpty(), Matchers.is(false));
-        Mockito.verify(target).getMessage(Mockito.eq("bagit.validation.manifest.not.supported"), Mockito.any());
+        Mockito.verify(target).getMessage(Mockito.eq("bagit.validation.manifest.not.supported"), Mockito.any(Object[].class));
 
         Mockito.verify(manifestReader).getManifestChecksums(fileDataProvider, expectedBagRoot);
     }
@@ -140,7 +140,7 @@ public void validateChecksums_should_return_error_when_data_provider_do_not_have
         for(Path filePath: checksums.getFileChecksums().keySet()) {
             MatcherAssert.assertThat(result.getFileResults().get(filePath).isError(), Matchers.is(true));
         }
-        Mockito.verify(target, Mockito.times(checksums.getFileChecksums().size())).getMessage(Mockito.eq("bagit.validation.file.not.found"), Mockito.any());
+        Mockito.verify(target, Mockito.times(checksums.getFileChecksums().size())).getMessage(Mockito.eq("bagit.validation.file.not.found"), Mockito.any(Object[].class));
 
         Mockito.verify(manifestReader).getManifestChecksums(fileDataProvider, expectedBagRoot);
         Mockito.verify(fileDataProvider).getFilePaths();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java
index 63a2650a5ef..08e853a58df 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/FileChecksumValidationJobTest.java
@@ -5,7 +5,7 @@
 import org.apache.commons.io.IOUtils;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.io.InputStream;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java
index e31e2fcc87b..3e84fda92b5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/ManifestReaderTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.util.bagit.data.FileDataProviderFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.nio.file.Path;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java
index 50ebaff2b7c..1d5e20a985e 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/DataFileDataProviderTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.DataFile;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.nio.file.Path;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java
index c5eadb09811..f43a0c78284 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java
@@ -2,7 +2,7 @@
 
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.nio.file.Path;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java
index 7e892180bed..e403b32b1b3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FolderDataProviderTest.java
@@ -2,7 +2,7 @@
 
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.io.IOException;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java
index 084fb6ed50f..67bd13d61e4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/ZipFileDataProviderTest.java
@@ -5,7 +5,7 @@
 import org.apache.commons.compress.archivers.zip.ZipFile;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.io.InputStream;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java
index d3f1dbcf805..8b047443745 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerFactoryTest.java
@@ -4,18 +4,18 @@
 import edu.harvard.iq.dataverse.util.bagit.BagValidator;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
+import org.mockito.junit.jupiter.MockitoExtension;
 
 /**
  *
  * @author adaybujeda
  */
-@RunWith(MockitoJUnitRunner.class)
+@ExtendWith(MockitoExtension.class)
 public class BagItFileHandlerFactoryTest {
 
     @Mock
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java
index f8c7565af7c..064fa8b440f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerPostProcessorTest.java
@@ -5,7 +5,7 @@
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.nio.file.Path;
 import java.util.ArrayList;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java
index b3687285f6c..e8dff33db80 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/BagItFileHandlerTest.java
@@ -14,8 +14,8 @@
 import edu.harvard.iq.dataverse.util.bagit.data.StringDataProvider;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
 import java.io.File;
@@ -47,7 +47,7 @@ public class BagItFileHandlerTest {
 
     private BagItFileHandler target;
 
-    @Before
+    @BeforeEach
     public void beforeEachTest() {
         FILE_UTIL = Mockito.mock(FileUtilWrapper.class, Mockito.RETURNS_DEEP_STUBS);
         SYSTEM_CONFIG = Mockito.mock(SystemConfig.class, Mockito.RETURNS_DEEP_STUBS);
@@ -99,7 +99,7 @@ public void handleBagItPackage_should_return_error_when_no_files_in_data_provide
         createDataFileResultAsserts(result);
 
         handleBagItPackageAsserts(fileDataProvider);
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     @Test
@@ -231,7 +231,7 @@ public void handleBagItPackage_should_return_error_when_FileExceedsMaxSizeExcept
 
         handleBagItPackageAsserts(dataProviderSpy);
         createDataFileAsserts(Arrays.asList(Path.of(bagEntry)), 2);
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     @Test
@@ -249,7 +249,7 @@ public void handleBagItPackage_should_return_error_when_the_maximum_number_of_fi
         MatcherAssert.assertThat(result.getErrors().size(), Matchers.is(1));
 
         handleBagItPackageAsserts(dataProviderSpy);
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     @Test
@@ -266,7 +266,7 @@ public void handleBagItPackage_should_return_error_when_bag_validation_fails() t
 
         handleBagItPackageAsserts(dataProviderSpy);
         createDataFileAsserts(dataProviderWithDataFiles.dataProvider.getFilePaths());
-        Mockito.verifyZeroInteractions(postProcessor);
+        Mockito.verifyNoInteractions(postProcessor);
     }
 
     private void createDataFileResultAsserts(CreateDataFileResult result) {
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java b/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java
index 7c392418de9..59d3ac15f11 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/file/CreateDataFileResultTest.java
@@ -3,7 +3,7 @@
 import edu.harvard.iq.dataverse.DataFile;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.Collections;
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
index dc36b197c55..b426f84a464 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/BriefJsonPrinterTest.java
@@ -6,15 +6,14 @@
 import edu.harvard.iq.dataverse.DatasetFieldType;
 import edu.harvard.iq.dataverse.DatasetVersion;
 import edu.harvard.iq.dataverse.MetadataBlock;
-import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import edu.harvard.iq.dataverse.workflow.Workflow;
-import javax.json.JsonObject;
-import org.junit.Test;
+import jakarta.json.JsonObject;
+import org.junit.jupiter.api.Test;
 
 import java.util.Collections;
 
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 /**
  *
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java
index c7077f936a0..7dd5ad9da8d 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/DatasetVersionDTOTest.java
@@ -17,12 +17,12 @@
 import java.util.HashSet;
 import java.util.List;
 import java.util.Scanner;
-import junit.framework.Assert;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 /**
  *
@@ -34,19 +34,19 @@ public class DatasetVersionDTOTest {
     public DatasetVersionDTOTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
        
     }
-    @After
+    @AfterEach
     public void tearDown() {
     }
     
@@ -89,7 +89,7 @@ public void testReadDataSet() {
             JsonElement expected = gson.toJsonTree(expectedDTO, FieldDTO.class);
             JsonElement result = gson.toJsonTree(authorDTO);
             
-            Assert.assertEquals(expected, result);
+            assertEquals(expected, result);
             
         } catch (IOException e) {
             e.printStackTrace();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
index 579711d63c3..972fc9c41cd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java
@@ -27,19 +27,20 @@
 import edu.harvard.iq.dataverse.license.LicenseServiceBean;
 import edu.harvard.iq.dataverse.mocks.MockDatasetFieldSvc;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assumptions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import javax.json.Json;
-import javax.json.JsonArray;
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonReader;
-import javax.json.JsonValue;
+import jakarta.json.Json;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonReader;
+import jakarta.json.JsonValue;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
@@ -58,9 +59,9 @@
 import java.util.Set;
 import java.util.TimeZone;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 /**
  *
@@ -81,15 +82,15 @@ public class JsonParserTest {
     public JsonParserTest() {
     }
     
-    @BeforeClass
+    @BeforeAll
     public static void setUpClass() {
     }
     
-    @AfterClass
+    @AfterAll
     public static void tearDownClass() {
     }
     
-    @Before
+    @BeforeEach
     public void setUp() {
         datasetFieldTypeSvc = new MockDatasetFieldSvc();
         datasetFieldTypeSvc.setMetadataBlock("citation");
@@ -184,8 +185,8 @@ public void testControlledVocalRepeatsRoundTrip() throws JsonParseException {
     }
     
     
-    @Test(expected=JsonParseException.class)
-     public void testChildValidation() throws JsonParseException {
+    @Test
+    void testChildValidation() {
         // This Json String is a compound field that contains the wrong
         // fieldType as a child ("description" is not a child of "coordinate").
         // It should throw a JsonParseException when it encounters the invalid child.
@@ -210,8 +211,8 @@ public void testChildValidation() throws JsonParseException {
         JsonReader jsonReader = Json.createReader(new StringReader(text));
         JsonObject obj = jsonReader.readObject();
 
-        sut.parseField(obj);
-       }
+        assertThrows(JsonParseException.class, () -> sut.parseField(obj));
+    }
     
     
     @Test
@@ -333,12 +334,12 @@ public void testParseMinimalDataverse() throws JsonParseException {
      * @throws JsonParseException if all goes well - this is expected.
      * @throws IOException when test file IO goes wrong - this is bad.
      */
-    @Test(expected = JsonParseException.class)
-    public void testParseNoAliasDataverse() throws JsonParseException, IOException {
+    @Test
+    void testParseNoAliasDataverse() throws IOException {
         JsonObject dvJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/no-alias-dataverse.json")) {
             dvJson = Json.createReader(jsonFile).readObject();
-            Dataverse actual = sut.parseDataverse(dvJson);
+            assertThrows(JsonParseException.class, () -> sut.parseDataverse(dvJson));
         }
     }
     
@@ -347,12 +348,12 @@ public void testParseNoAliasDataverse() throws JsonParseException, IOException {
      * @throws JsonParseException if all goes well - this is expected.
      * @throws IOException when test file IO goes wrong - this is bad.
      */
-    @Test(expected = JsonParseException.class)
-    public void testParseNoNameDataverse() throws JsonParseException, IOException {
+    @Test
+    void testParseNoNameDataverse() throws IOException {
         JsonObject dvJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/no-name-dataverse.json")) {
             dvJson = Json.createReader(jsonFile).readObject();
-            Dataverse actual = sut.parseDataverse(dvJson);
+            assertThrows(JsonParseException.class, () -> sut.parseDataverse(dvJson));
         }
     }
     
@@ -362,12 +363,12 @@ public void testParseNoNameDataverse() throws JsonParseException, IOException {
      * @throws JsonParseException if all goes well - this is expected.
      * @throws IOException when test file IO goes wrong - this is bad.
      */
-    @Test(expected = JsonParseException.class)
-    public void testParseNoContactEmailsDataverse() throws JsonParseException, IOException {
+    @Test
+    void testParseNoContactEmailsDataverse() throws IOException {
         JsonObject dvJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/no-contacts-dataverse.json")) {
             dvJson = Json.createReader(jsonFile).readObject();
-            Dataverse actual = sut.parseDataverse(dvJson);
+            assertThrows(JsonParseException.class, () -> sut.parseDataverse(dvJson));
         }
     }
 
@@ -420,16 +421,14 @@ public void testDateTimeRoundtrip() throws ParseException {
      * Expect an exception when the dataset JSON is empty.
      * @throws JsonParseException when the test is broken
      */
-    @Test(expected = NullPointerException.class)
-    public void testParseEmptyDataset() throws JsonParseException {
+    @Test
+    void testParseEmptyDataset() throws JsonParseException {
         JsonObject dsJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/empty-dataset.json")) {
             InputStreamReader reader = new InputStreamReader(jsonFile, "UTF-8");
             dsJson = Json.createReader(reader).readObject();
             System.out.println(dsJson != null);
-            Dataset actual = sut.parseDataset(dsJson);
-            assertEquals("10.5072", actual.getAuthority());
-            assertEquals("doi", actual.getProtocol());
+            assertThrows(NullPointerException.class, () -> sut.parseDataset(dsJson));
         } catch (IOException ioe) {
             throw new JsonParseException("Couldn't read test file", ioe);
         }
@@ -443,13 +442,13 @@ public void testParseEmptyDataset() throws JsonParseException {
      * @throws IOException when test file IO goes wrong - this is bad.
      */
     @Test
-    public void testParseOvercompleteDatasetVersion() throws JsonParseException, IOException {
+    void testParseOvercompleteDatasetVersion() throws IOException {
         JsonObject dsJson;
         try (InputStream jsonFile = ClassLoader.getSystemResourceAsStream("json/complete-dataset-version.json")) {
             InputStreamReader reader = new InputStreamReader(jsonFile, "UTF-8");
             dsJson = Json.createReader(reader).readObject();
-            System.out.println(dsJson != null);
-            DatasetVersion actual = sut.parseDatasetVersion(dsJson);
+            Assumptions.assumeTrue(dsJson != null);
+            assertDoesNotThrow(() -> sut.parseDatasetVersion(dsJson));
         }
     }
     
@@ -566,31 +565,31 @@ public void testValidRegexMailDomainGroup() throws JsonParseException {
         assertEquals(test.hashCode(), parsed.hashCode());
     }
     
-    @Test(expected = JsonParseException.class)
-    public void testMailDomainGroupMissingName() throws JsonParseException {
+    @Test
+    void testMailDomainGroupMissingName() {
         // given
         String noname = "{ \"id\": 1, \"alias\": \"test\", \"domains\": [] }";
         JsonObject obj = Json.createReader(new StringReader(noname)).readObject();
         // when && then
-        MailDomainGroup parsed = new JsonParser().parseMailDomainGroup(obj);
+        assertThrows(JsonParseException.class, () -> new JsonParser().parseMailDomainGroup(obj));
     }
     
-    @Test(expected = JsonParseException.class)
-    public void testMailDomainGroupMissingDomains() throws JsonParseException {
+    @Test
+    void testMailDomainGroupMissingDomains() {
         // given
         String noname = "{ \"name\": \"test\", \"alias\": \"test\" }";
         JsonObject obj = Json.createReader(new StringReader(noname)).readObject();
         // when && then
-        MailDomainGroup parsed = new JsonParser().parseMailDomainGroup(obj);
+        assertThrows(JsonParseException.class, () -> new JsonParser().parseMailDomainGroup(obj));
     }
     
-    @Test(expected = JsonParseException.class)
-    public void testMailDomainGroupNotEnabledRegexDomains() throws JsonParseException {
+    @Test
+    void testMailDomainGroupNotEnabledRegexDomains() {
         // given
         String regexNotEnabled = "{ \"id\": 1, \"alias\": \"test\", \"domains\": [\"^foobar\\\\.com\"] }";
         JsonObject obj = Json.createReader(new StringReader(regexNotEnabled)).readObject();
         // when && then
-        MailDomainGroup parsed = new JsonParser().parseMailDomainGroup(obj);
+        assertThrows(JsonParseException.class, () -> new JsonParser().parseMailDomainGroup(obj));
     }
 
     @Test
@@ -712,8 +711,8 @@ public void testEnum() throws JsonParseException {
             arr.add(entry.name());
         }
         Set<Type> typesSet = new HashSet<>(JsonParser.parseEnumsFromArray(arr.build(), Type.class));
-        assertTrue("Set contains two elements", typesSet.size() == 2);
-        assertTrue("Set contains REVOKEROLE", typesSet.contains(Type.REVOKEROLE));
-        assertTrue("Set contains ASSIGNROLE", typesSet.contains(Type.ASSIGNROLE));
+        assertEquals(2, typesSet.size(), "Set contains two elements");
+        assertTrue(typesSet.contains(Type.REVOKEROLE), "Set contains REVOKEROLE");
+        assertTrue(typesSet.contains(Type.ASSIGNROLE), "Set contains ASSIGNROLE");
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
index cbefd3be0ad..88f6a5bdbce 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
@@ -19,21 +19,25 @@
 import java.util.List;
 import java.util.Set;
 
-import javax.json.JsonArrayBuilder;
-import javax.json.JsonObject;
-import javax.json.JsonObjectBuilder;
-import javax.json.JsonString;
-import org.junit.Test;
-import org.junit.Before;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import jakarta.json.JsonArrayBuilder;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonString;
+
+import edu.harvard.iq.dataverse.util.BundleUtil;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.BeforeEach;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 public class JsonPrinterTest {
 
     MockDatasetFieldSvc datasetFieldTypeSvc = null;
 
-    @Before
+    @BeforeEach
     public void setUp() {
         datasetFieldTypeSvc = new MockDatasetFieldSvc();
         datasetFieldTypeSvc.setMetadataBlock("citation");
@@ -110,7 +114,7 @@ public void testJson_RoleAssignment() {
         JsonObjectBuilder job = JsonPrinter.json(ra);
         assertNotNull(job);
         JsonObject jsonObject = job.build();
-        assertEquals("#42", jsonObject.getString("assignee"));
+        assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getString("assignee"));
         assertEquals(123, jsonObject.getInt("definitionPointId"));
         assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("privateUrlToken"));
     }
@@ -131,7 +135,7 @@ public void testJson_PrivateUrl() {
         assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token"));
         assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link"));
         assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken"));
-        assertEquals("#42", jsonObject.getJsonObject("roleAssignment").getString("assignee"));
+        assertEquals(PrivateUrlUser.PREFIX + "42", jsonObject.getJsonObject("roleAssignment").getString("assignee"));
     }
 
     @Test
@@ -201,7 +205,7 @@ public void testDatasetContactOutOfBoxNoPrivacy() {
         SettingsServiceBean nullServiceBean = null;
         DatasetFieldServiceBean nullDFServiceBean = null;
         JsonPrinter.injectSettingsService(nullServiceBean, nullDFServiceBean);
-        
+
         JsonObject jsonObject = JsonPrinter.json(block, fields).build();
         assertNotNull(jsonObject);
 
@@ -240,7 +244,7 @@ public void testDatasetContactWithPrivacy() {
         vals.add(val);
         datasetContactField.setDatasetFieldCompoundValues(vals);
         fields.add(datasetContactField);
-        
+
         DatasetFieldServiceBean nullDFServiceBean = null;
         JsonPrinter.injectSettingsService(new MockSettingsSvc(), nullDFServiceBean);
 
@@ -319,4 +323,32 @@ public void testEnum() throws JsonParseException {
         assertTrue(typesSet.contains("ASSIGNROLE"));
     }
 
+    @Test
+    public void testMetadataBlockAnonymized() {
+        MetadataBlock block = new MetadataBlock();
+        block.setName("citation");
+        List<DatasetField> fields = new ArrayList<>();
+        DatasetField datasetAuthorField = new DatasetField();
+        DatasetFieldType datasetAuthorFieldType = datasetFieldTypeSvc.findByName("author");
+        datasetAuthorFieldType.setMetadataBlock(block);
+        datasetAuthorField.setDatasetFieldType(datasetAuthorFieldType);
+        List<DatasetFieldCompoundValue> compoundValues = new LinkedList<>();
+        DatasetFieldCompoundValue compoundValue = new DatasetFieldCompoundValue();
+        compoundValue.setParentDatasetField(datasetAuthorField);
+        compoundValue.setChildDatasetFields(Arrays.asList(
+                constructPrimitive("authorName", "Test Author"),
+                constructPrimitive("authorAffiliation", "Test Affiliation")
+        ));
+        compoundValues.add(compoundValue);
+        datasetAuthorField.setDatasetFieldCompoundValues(compoundValues);
+        fields.add(datasetAuthorField);
+
+        JsonObject actualJsonObject = JsonPrinter.json(block, fields, List.of("author")).build();
+
+        assertNotNull(actualJsonObject);
+        JsonObject actualAuthorJsonObject = actualJsonObject.getJsonArray("fields").getJsonObject(0);
+        assertEquals(BundleUtil.getStringFromBundle("dataset.anonymized.withheld"), actualAuthorJsonObject.getString("value"));
+        assertEquals("primitive", actualAuthorJsonObject.getString("typeClass"));
+        assertFalse(actualAuthorJsonObject.getBoolean("multiple"));
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java
index 847fbfc9cba..3e4f9a690d2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonUtilTest.java
@@ -1,19 +1,19 @@
 package edu.harvard.iq.dataverse.util.json;
 
-import static org.junit.Assert.assertEquals;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
 
-public class JsonUtilTest {
+class JsonUtilTest {
 
     @Test
-    public void testPrettyPrint() {
-        JsonUtil jsonUtil = new JsonUtil();
+    void testPrettyPrint() {
         String nullString = null;
         assertEquals(null, JsonUtil.prettyPrint(nullString));
         assertEquals("", JsonUtil.prettyPrint(""));
         assertEquals("junk", JsonUtil.prettyPrint("junk"));
-        assertEquals("{}", JsonUtil.prettyPrint("{}"));
-        assertEquals("{\n" + "  \"foo\": \"bar\"\n" + "}", JsonUtil.prettyPrint("{\"foo\": \"bar\"}"));
+        assertEquals("{\n}", JsonUtil.prettyPrint("{}"));
+        assertEquals("[\n    \"junk\"\n]", JsonUtil.prettyPrint("[\"junk\"]"));
+        assertEquals("{\n" + "    \"foo\": \"bar\"\n" + "}", JsonUtil.prettyPrint("{\"foo\": \"bar\"}"));
     }
 
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
index 8aa10c9667f..b93028b6365 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java
@@ -11,16 +11,18 @@
 import edu.harvard.iq.dataverse.util.ShapefileHandler;
 import static edu.harvard.iq.dataverse.util.ShapefileHandler.SHP_XML_EXTENSION;
 
+import java.nio.file.Files;
+import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.List;
 import java.io.File;
 
-import org.junit.Rule;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
-
-import org.junit.rules.TemporaryFolder;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -40,10 +42,8 @@
  */
 public class ShapefileHandlerTest {
     
-        
-    @Rule
-    public TemporaryFolder tempFolder = new TemporaryFolder();
-   
+    @TempDir
+    Path tempFolder;
     
     public void msg(String s){
             System.out.println(s);
@@ -61,28 +61,9 @@ private File createBlankFile(String filename) throws IOException {
         if (filename == null){
             return null;
         }
-        File aFile = this.tempFolder.newFile(filename);
-        //  FileUtils.writeStringToFile(tempFile, "hello world");
-
-        aFile.createNewFile();
-        return aFile;
+        return Files.createFile(tempFolder.resolve(filename)).toFile();
     }
     
-
-     
-     
-    private void showFilesInFolder(String m, String folder_name) throws IOException{
-        msgt(m);
-        File folder = new File(folder_name);
-        for (File f : folder.listFiles() ){
-            this.msg("fname: " + f.getCanonicalPath());
-        }
-    } 
-         
-    private void showFilesInTempFolder(String m) throws IOException{
-        this.showFilesInFolder(m, this.tempFolder.getRoot().getAbsolutePath());
-    } 
-    
     private FileInputStream createZipReturnFilestream(List<String> file_names, String zipfile_name) throws IOException{
         
         File zip_file_obj = this.createAndZipFiles(file_names, zipfile_name);
@@ -116,8 +97,8 @@ private File createAndZipFiles(List<String> file_names, String zipfile_name) thr
            //msg("File created: " + file_obj.getName());           
         }
         
-        File zip_file_obj = this.tempFolder.newFile(zipfile_name);
-        ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj));
+        Path zip_file_obj = this.tempFolder.resolve(zipfile_name);
+        ZipOutputStream zip_stream = new ZipOutputStream(new FileOutputStream(zip_file_obj.toFile()));
 
         // Iterate through File objects and add them to the ZipOutputStream
         for (File file_obj : fileCollection) {
@@ -131,7 +112,7 @@ private File createAndZipFiles(List<String> file_names, String zipfile_name) thr
              file_obj.delete();
         }
         
-        return zip_file_obj;
+        return zip_file_obj.toFile();
         
     } // end createAndZipFiles
     
@@ -149,17 +130,14 @@ public void testCreateZippedNonShapefile() throws IOException{
         shp_handler.DEBUG= true;
 
         // Contains shapefile?
-        assertEquals(shp_handler.containsShapefile(), false);
+        assertFalse(shp_handler.containsShapefile());
 
         // get file_groups Map
         Map<String, List<String>> file_groups = shp_handler.getFileGroups();
         
-        assertEquals("verify that the dict is not empty", file_groups.isEmpty(), false);
-        assertEquals("verify key existance", file_groups.containsKey("not-quite-a-shape"), true);
-        assertEquals("verify value of key", file_groups.get("not-quite-a-shape"), Arrays.asList("shp", "shx", "dbf", "pdf"));
-        
-        this.showFilesInTempFolder(this.tempFolder.getRoot().getAbsolutePath());
-
+        assertFalse(file_groups.isEmpty(), "verify that the dict is not empty");
+        assertTrue(file_groups.containsKey("not-quite-a-shape"), "verify key existance");
+        assertEquals(List.of("shp", "shx", "dbf", "pdf"), file_groups.get("not-quite-a-shape"), "verify value of key");
         
         // Delete .zip
         zipfile_obj.delete();
@@ -185,8 +163,8 @@ public void testZippedTwoShapefiles() throws IOException{
         ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
         shp_handler.DEBUG= true;
         
-        assertEquals("verify shapefile existance", shp_handler.containsShapefile(), true);
-        assertEquals("verify that no error was found", shp_handler.errorFound, false);
+        assertTrue(shp_handler.containsShapefile(), "verify shapefile existence");
+        assertFalse(shp_handler.errorFound, "verify that no error was found");
         
         shp_handler.showFileGroups();
        // if (true){
@@ -195,21 +173,18 @@ public void testZippedTwoShapefiles() throws IOException{
         // get file_groups Map
         Map<String, List<String>> file_groups = shp_handler.getFileGroups();
         
-        assertEquals("verify that the dict is not empty", file_groups.isEmpty(), false);
+        assertFalse(file_groups.isEmpty(), "verify that the dict is not empty");
 
         // Verify the keys
-        assertEquals("verify key existance of 'shape1'", file_groups.containsKey("shape1"), true);
-        assertEquals("verify key existance of 'shape2'", file_groups.containsKey("shape2"), true);
+        assertTrue(file_groups.containsKey("shape1"), "verify key existance of 'shape1'");
+        assertTrue(file_groups.containsKey("shape2"), "verify key existance of 'shape2'");
 
         // Verify the values
-        assertEquals("verify value of key 'shape1'", file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "fbn", "fbx"));
-        assertEquals("verify value of key 'shape2'", file_groups.get("shape2"), Arrays.asList("shp", "shx", "dbf", "prj", "txt", "pdf", ShapefileHandler.BLANK_EXTENSION));
-        
-        this.showFilesInTempFolder(this.tempFolder.getRoot().getAbsolutePath());
-
+        assertEquals(file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "fbn", "fbx"), "verify value of key 'shape1'");
+        assertEquals(file_groups.get("shape2"), Arrays.asList("shp", "shx", "dbf", "prj", "txt", "pdf", ShapefileHandler.BLANK_EXTENSION), "verify value of key 'shape2'");
         
         // Rezip/Reorder the files
-        File test_unzip_folder = this.tempFolder.newFolder("test_unzip").getAbsoluteFile();
+        File test_unzip_folder = Files.createDirectory(this.tempFolder.resolve("test_unzip")).toFile();
         //File test_unzip_folder = new File("/Users/rmp553/Desktop/blah");
         shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), test_unzip_folder );
         
@@ -218,9 +193,9 @@ public void testZippedTwoShapefiles() throws IOException{
         List<String> rezipped_filenames = new ArrayList<>();
         rezipped_filenames.addAll(Arrays.asList(test_unzip_folder.list()));
         msg("rezipped_filenames: " + rezipped_filenames);
-        List<String> expected_filenames = Arrays.asList("shape1.zip", "shape2.zip", "shape2.txt", "shape2.pdf", "shape2", "README.MD", "shp_dictionary.xls", "notes");  
-
-        assertEquals("verify that all files exist", rezipped_filenames.containsAll(rezipped_filenames), true);
+        List<String> expected_filenames = Arrays.asList("shape1.zip", "shape2.zip", "shape2.txt", "shape2.pdf", "shape2", "README.MD", "shp_dictionary.xls", "notes");
+        
+        assertTrue(rezipped_filenames.containsAll(expected_filenames), "verify that all files exist");
         
         // Delete .zip
         zipfile_obj.delete();
@@ -240,23 +215,23 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
         // Pass the .zip to the ShapefileHandler
         ShapefileHandler shp_handler = new ShapefileHandler(new FileInputStream(zipfile_obj));
         shp_handler.DEBUG= true;
-
-        assertEquals("verify shapefile existance", shp_handler.containsShapefile(), true);
+        
+        assertTrue(shp_handler.containsShapefile(), "verify shapefile existence");
 
         // get file_groups Map
         Map<String, List<String>> file_groups = shp_handler.getFileGroups();
-
-        assertEquals("verify that the dict is not empty", file_groups.isEmpty(), false);
+        
+        assertFalse(file_groups.isEmpty(), "verify that the dict is not empty");
 
         // Verify the keys
-        assertEquals("verify key existance of 'shape1'", file_groups.containsKey("shape1"), true);
+        assertTrue(file_groups.containsKey("shape1"), "verify key existance of 'shape1'");
 
         // Verify the values
-        assertEquals("verify value of key 'shape1'", file_groups.get("shape1"), Arrays.asList("shp", "shx", "dbf", "prj", "pdf", "cpg", SHP_XML_EXTENSION));
-        assertEquals("verify value of key 'README'", file_groups.get("README"), Arrays.asList("md"));
-        assertEquals("verify value of key 'shape_notes'", file_groups.get("shape_notes"), Arrays.asList("txt"));
+        assertEquals(List.of("shp", "shx", "dbf", "prj", "pdf", "cpg", SHP_XML_EXTENSION), file_groups.get("shape1"), "verify value of key 'shape1'");
+        assertEquals(List.of("md"), file_groups.get("README"), "verify value of key 'README'");
+        assertEquals(List.of("txt"), file_groups.get("shape_notes"), "verify value of key 'shape_notes'");
         
-        File unzip2Folder = this.tempFolder.newFolder("test_unzip2").getAbsoluteFile();
+        File unzip2Folder = Files.createDirectory(this.tempFolder.resolve("test_unzip2")).toFile();
         // Rezip/Reorder the files
         shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), unzip2Folder);
         //shp_handler.rezipShapefileSets(new FileInputStream(zipfile_obj), new File("/Users/rmp553/Desktop/blah"));
@@ -267,9 +242,9 @@ public void testZippedShapefileWithExtraFiles() throws IOException{
         rezipped_filenames.addAll(Arrays.asList(unzip2Folder.list()));
         
         msg("rezipped_filenames: " + rezipped_filenames);
-        List<String> expected_filenames = Arrays.asList("shape1.zip", "scratch-for-unzip-12345", "shape1.pdf", "README.md", "shape_notes.txt");  
-
-        assertEquals("verify that all files exist", expected_filenames.containsAll(rezipped_filenames), true);
+        List<String> expected_filenames = Arrays.asList("shape1.zip", "scratch-for-unzip-12345", "shape1.pdf", "README.md", "shape_notes.txt");
+        
+        assertTrue(expected_filenames.containsAll(rezipped_filenames), "verify that all files exist");
         
         // Delete .zip
         zipfile_obj.delete();
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java
index f54cadaf253..85b10489f15 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSetting.java
@@ -39,6 +39,8 @@
 @ResourceLock(value = Resources.SYSTEM_PROPERTIES, mode = ResourceAccessMode.READ_WRITE)
 public @interface JvmSetting {
     
+    static final String PLACEHOLDER = "NULL";
+    
     /**
      * The key of the system property to be set.
      */
@@ -47,10 +49,12 @@
     /**
      * The value of the system property to be set.
      */
-    String value();
+    String value() default PLACEHOLDER;
     
     String[] varArgs() default {};
     
+    String method() default PLACEHOLDER;
+    
     /**
      * Containing annotation of repeatable {@code @SetSystemProperty}.
      */
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java
new file mode 100644
index 00000000000..1235df89b3e
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingBroker.java
@@ -0,0 +1,43 @@
+package edu.harvard.iq.dataverse.util.testing;
+
+import java.io.IOException;
+
+/**
+ * Provides an interface to access and manipulate {@link edu.harvard.iq.dataverse.settings.JvmSettings}
+ * wherever they live (locally, remotely, via different access mechanisms, etc.).
+ * Part of the {@link JvmSettingExtension} extension, which allows JUnit 5 tests to manipulate these
+ * settings and thereby exercise different code paths.
+ * @implNote Implementations must be able to set or delete a setting on the fly, without requiring a
+ *           restart or similar; otherwise they are useless for testing.
+ *           Non-hot-reloadable settings remain a problem; the code under test should be refactored in such cases.
+ */
+public interface JvmSettingBroker {
+    
+    /**
+     * Retrieve the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} given as its {@link String}
+     * representation; the key is passed as a string because variable names may already have been substituted into it.
+     * @param key The key of the JVM setting to retrieve, e.g. "dataverse.fqdn".
+     * @return The value of the setting if present or null.
+     * @throws IOException When communication goes sideways.
+     */
+    String getJvmSetting(String key) throws IOException;
+    
+    /**
+     * Set the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String}
+     * representation); the key is passed as a string because variable names may already have been substituted into it.
+     * @param key The key of the JVM setting to set, e.g. "dataverse.fqdn".
+     * @param value The value the JVM setting should be set to.
+     * @throws IOException When communication goes sideways.
+     */
+    void setJvmSetting(String key, String value) throws IOException;
+    
+    /**
+     * Remove the value of a {@link edu.harvard.iq.dataverse.settings.JvmSettings} (given as its {@link String}
+     * representation). Some tests need to clear a setting and potentially have it restored afterward;
+     * the key is passed as a string because variable names may already have been substituted into it.
+     * @param key The key of the JVM setting to delete, e.g. "dataverse.fqdn".
+     * @throws IOException When communication goes sideways.
+     */
+    String deleteJvmSetting(String key) throws IOException;
+    
+}
\ No newline at end of file
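
The broker contract above is intentionally small. As a purely illustrative sketch (not part of this patch), an in-memory implementation backed by a plain Map would satisfy it; the System-property-backed broker used for local unit tests is provided by LocalJvmSettings further below.

import java.util.HashMap;
import java.util.Map;

// Hypothetical in-memory broker, for illustration only; real brokers talk to
// System properties (see LocalJvmSettings below) or to a remote server.
class InMemoryJvmSettingBroker implements JvmSettingBroker {
    private final Map<String, String> store = new HashMap<>();

    @Override
    public String getJvmSetting(String key) {
        return store.get(key);
    }

    @Override
    public void setJvmSetting(String key, String value) {
        store.put(key, value);
    }

    @Override
    public String deleteJvmSetting(String key) {
        return store.remove(key);
    }
}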
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java
index 56e87589139..2065d7b3ae6 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/JvmSettingExtension.java
@@ -1,58 +1,124 @@
 package edu.harvard.iq.dataverse.util.testing;
 
 import edu.harvard.iq.dataverse.settings.JvmSettings;
+import org.junit.jupiter.api.extension.AfterAllCallback;
 import org.junit.jupiter.api.extension.AfterTestExecutionCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
 import org.junit.jupiter.api.extension.BeforeTestExecutionCallback;
 import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.platform.commons.support.AnnotationSupport;
+import org.junit.platform.commons.support.ReflectionSupport;
 
-public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback {
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.List;
+import java.util.Optional;
+
+import static edu.harvard.iq.dataverse.util.testing.JvmSetting.PLACEHOLDER;
+
+public class JvmSettingExtension implements BeforeTestExecutionCallback, AfterTestExecutionCallback, BeforeAllCallback, AfterAllCallback {
     
-    private ExtensionContext.Store getStore(ExtensionContext context) {
-        return context.getStore(ExtensionContext.Namespace.create(getClass(), context.getRequiredTestClass(), context.getRequiredTestMethod()));
+    @Override
+    public void beforeAll(ExtensionContext extensionContext) throws Exception {
+        List<JvmSetting> settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class);
+        ExtensionContext.Store store = extensionContext.getStore(
+            ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass()));
+        
+        setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store);
+    }
+    
+    @Override
+    public void afterAll(ExtensionContext extensionContext) throws Exception {
+        List<JvmSetting> settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestClass(), JvmSetting.class);
+        ExtensionContext.Store store = extensionContext.getStore(
+            ExtensionContext.Namespace.create(getClass(), extensionContext.getRequiredTestClass()));
+        
+        resetSetting(settings, getBroker(extensionContext), store);
     }
     
     @Override
     public void beforeTestExecution(ExtensionContext extensionContext) throws Exception {
-        extensionContext.getTestMethod().ifPresent(method -> {
-            JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class);
-            for (JvmSetting setting : settings) {
-                // get the setting name (might need var args substitution)
-                String settingName = getSettingName(setting);
-                
-                // get the setting ...
-                String oldSetting = System.getProperty(settingName);
+        List<JvmSetting> settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class);
+        ExtensionContext.Store store = extensionContext.getStore(
+            ExtensionContext.Namespace.create(
+                getClass(),
+                extensionContext.getRequiredTestClass(),
+                extensionContext.getRequiredTestMethod()
+            ));
+        
+        setSetting(extensionContext.getRequiredTestClass(), settings, getBroker(extensionContext), store);
+    }
+    
+    @Override
+    public void afterTestExecution(ExtensionContext extensionContext) throws Exception {
+        List<JvmSetting> settings = AnnotationSupport.findRepeatableAnnotations(extensionContext.getTestMethod(), JvmSetting.class);
+        ExtensionContext.Store store = extensionContext.getStore(
+            ExtensionContext.Namespace.create(
+                getClass(),
+                extensionContext.getRequiredTestClass(),
+                extensionContext.getRequiredTestMethod()
+            ));
+        
+        resetSetting(settings, getBroker(extensionContext), store);
+    }
     
-                // if present - store in context to restore later
-                if (oldSetting != null) {
-                    getStore(extensionContext).put(settingName, oldSetting);
+    private void setSetting(Class<?> testClass, List<JvmSetting> settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception {
+        for (JvmSetting setting : settings) {
+            // get the setting name (might need var args substitution)
+            String settingName = getSettingName(setting);
+            
+            // get the setting value ...
+            String oldSetting = broker.getJvmSetting(settingName);
+            
+            // if present - store in context to restore later
+            if (oldSetting != null) {
+                store.put(settingName, oldSetting);
+            }
+            
+            // set to new value
+            if (setting.value().equals(PLACEHOLDER) && setting.method().equals(PLACEHOLDER)) {
+                throw new IllegalArgumentException("You must either provide a value or a method reference " +
+                    "for key JvmSettings" + setting.key());
+            }
+            
+            String value;
+            // Retrieve value from static (!) test class method if no direct setting given
+            if (setting.value().equals(PLACEHOLDER)) {
+                Optional<Method> valueMethod = ReflectionSupport.findMethod(testClass, setting.method());
+                if (valueMethod.isEmpty() || ! Modifier.isStatic(valueMethod.get().getModifiers())) {
+                    throw new IllegalStateException("Could not find a static method '" + setting.method() + "' in test class");
                 }
-                
-                // set to new value
-                System.setProperty(settingName, setting.value());
+                value = (String) ReflectionSupport.invokeMethod(valueMethod.get(), null);
+            // Set to new value by using the directly given value
+            } else {
+                value = setting.value();
             }
-        });
+            
+            // If the retrieved value is null, delete the setting (will be reset after the test), otherwise set.
+            if (value != null) {
+                broker.setJvmSetting(settingName, value);
+            } else if (oldSetting != null) {
+                broker.deleteJvmSetting(settingName);
+            }
+        }
     }
     
-    @Override
-    public void afterTestExecution(ExtensionContext extensionContext) throws Exception {
-        extensionContext.getTestMethod().ifPresent(method -> {
-            JvmSetting[] settings = method.getAnnotationsByType(JvmSetting.class);
-            for (JvmSetting setting : settings) {
-                // get the setting name (might need var args substitution)
-                String settingName = getSettingName(setting);
-                
-                // get a stored setting from context
-                String oldSetting = getStore(extensionContext).remove(settingName, String.class);
-                
-                // if present before, restore
-                if (oldSetting != null) {
-                    System.setProperty(settingName, oldSetting);
+    private void resetSetting(List<JvmSetting> settings, JvmSettingBroker broker, ExtensionContext.Store store) throws Exception {
+        for (JvmSetting setting : settings) {
+            // get the setting name (might need var args substitution)
+            String settingName = getSettingName(setting);
+            
+            // get a stored setting from context
+            String oldSetting = store.remove(settingName, String.class);
+            
+            // if present before, restore
+            if (oldSetting != null) {
+                broker.setJvmSetting(settingName, oldSetting);
                 // if NOT present before, delete
-                } else {
-                    System.clearProperty(settingName);
-                }
+            } else {
+                broker.deleteJvmSetting(settingName);
             }
-        });
+        }
     }
     
     private String getSettingName(JvmSetting setting) {
@@ -72,4 +138,15 @@ private String getSettingName(JvmSetting setting) {
         
         return target.getScopedKey();
     }
+    
+    private JvmSettingBroker getBroker(ExtensionContext extensionContext) throws Exception {
+        // Is this test class using local system properties, then get a broker for these
+        if (AnnotationSupport.isAnnotated(extensionContext.getTestClass(), LocalJvmSettings.class)) {
+            return LocalJvmSettings.localBroker;
+        // NOTE: this might be extended later with other annotations to support other means of handling the settings
+        } else {
+            throw new IllegalStateException("You must provide the @LocalJvmSettings annotation to the test class");
+        }
+    }
+    
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java
new file mode 100644
index 00000000000..372fa91f6f6
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/LocalJvmSettings.java
@@ -0,0 +1,39 @@
+package edu.harvard.iq.dataverse.util.testing;
+
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Inherited;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * This annotation marks a test class that manipulates local settings
+ * (because the tests run within the same JVM as the code under test).
+ * This is typically the case for unit tests.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ ElementType.TYPE })
+@ExtendWith(JvmSettingExtension.class)
+@Inherited
+public @interface LocalJvmSettings {
+    
+    JvmSettingBroker localBroker = new JvmSettingBroker() {
+        @Override
+        public String getJvmSetting(String key) {
+            return System.getProperty(key);
+        }
+        
+        @Override
+        public void setJvmSetting(String key, String value) {
+            System.setProperty(key, value);
+        }
+        
+        @Override
+        public String deleteJvmSetting(String key) {
+            return System.clearProperty(key);
+        }
+    };
+    
+}
\ No newline at end of file
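
To show how the pieces fit together, here is a minimal usage sketch under stated assumptions: the test class, its methods, and the JvmSettings.FQDN constant are illustrative and not taken from this patch.

import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.util.testing.JvmSetting;
import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
import org.junit.jupiter.api.Test;

@LocalJvmSettings
class FqdnDependentTest {

    // Assumed static supplier; the extension requires referenced methods to be static on the test class.
    static String fqdn() {
        return "dataverse.example.org";
    }

    @Test
    @JvmSetting(key = JvmSettings.FQDN, value = "localhost") // JvmSettings.FQDN is an assumed constant
    void readsLiteralValue() {
        // code under test sees dataverse.fqdn=localhost via the local System-property broker
    }

    @Test
    @JvmSetting(key = JvmSettings.FQDN, method = "fqdn")
    void readsComputedValue() {
        // the extension looks up the static fqdn() method and applies its return value;
        // in both cases the previous value is restored (or the property cleared) after the test
    }
}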
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java b/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java
new file mode 100644
index 00000000000..1544d393896
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/util/testing/Tags.java
@@ -0,0 +1,7 @@
+package edu.harvard.iq.dataverse.util.testing;
+
+public class Tags {
+    public static final String NOT_ESSENTIAL_UNITTESTS = "not-essential-unittests";
+    public static final String INTEGRATION_TEST = "integration";
+    public static final String USES_TESTCONTAINERS = "testcontainers";
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java
index 8b027b797c5..1a190389ed5 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlPrinterTest.java
@@ -1,7 +1,7 @@
 package edu.harvard.iq.dataverse.util.xml;
 
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 public class XmlPrinterTest {
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java
index dbb87e9e560..f339691fde3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/xml/XmlValidatorTest.java
@@ -1,28 +1,27 @@
 package edu.harvard.iq.dataverse.util.xml;
 
-import edu.harvard.iq.dataverse.NonEssentialTests;
-
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URL;
 
 import javax.xml.parsers.ParserConfigurationException;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assertions.fail;
 
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import edu.harvard.iq.dataverse.util.testing.Tags;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 import org.xml.sax.SAXException;
 
 public class XmlValidatorTest {
 
     //Ignored as this relies on an external resource that has been down occasionally. 
     //May be a good test for our full vs. everytime test classifications (#4896) -MAD 4.9.1
-    @Ignore
-    @Category(NonEssentialTests.class)
+    @Disabled
+    @Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
     @Test
     public void testValidateXml() throws IOException, SAXException, ParserConfigurationException {
         assertTrue(XmlValidator.validateXmlSchema("src/test/java/edu/harvard/iq/dataverse/util/xml/sendToDataCite.xml", new URL("https://schema.datacite.org/meta/kernel-3/metadata.xsd")));
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java
index 80d848248c0..0cbc9e52759 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/EMailValidatorTest.java
@@ -4,9 +4,9 @@
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 
 import java.util.Set;
 import java.util.stream.Stream;
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java
index b574ac4a082..c5a38c473fb 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/PasswordValidatorUtilTest.java
@@ -5,24 +5,18 @@
  */
 package edu.harvard.iq.dataverse.validation;
 
-import edu.harvard.iq.dataverse.util.BundleUtil;
 import edu.harvard.iq.dataverse.util.xml.html.HtmlPrinter;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.List;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.runners.Enclosed;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import java.util.stream.Stream;
 
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 import org.passay.CharacterRule;
 import org.passay.EnglishCharacterData;
 
@@ -30,10 +24,9 @@
  *
  * @author pdurbin
  */
-@RunWith(Enclosed.class)
 public class PasswordValidatorUtilTest {
 
-    public static class PasswordValidatorUtilNoParamTest {
+    static class PasswordValidatorUtilNoParamTest {
         /**
          * Test of getPasswordRequirements method, of class PasswordValidatorUtil.
          */
@@ -83,79 +76,38 @@ public void testParseConfigString() {
 
     }
 
-    @RunWith(Parameterized.class)
-    public static class PasswordValidatorUtilParamTest {
-
-        // influences use of # or "each" in text generation
-        @Parameter(0)
-        public int numberOfCharacteristics;
-
-        @Parameter(1)
-        public String characterRulesConfigString;
-
-        @Parameter(2)
-        public String expectedValue;
-
-        @Parameters
-        public static Collection data() {
-            return Arrays.asList(new Object[][] {
-                {
-                    2,
-                    null,
-                    "At least 1 character from each of the following types: letter, numeral"
-                },
-                {
-                    2,
-                    "UpperCase:1,LowerCase:1,Digit:1,Special:1",
-                    "At least 1 character from 2 of the following types: uppercase, lowercase, numeral, special"
-                },
-                {
-                    4,
-                    "UpperCase:1,LowerCase:1,Digit:1,Special:1",
-                    "At least 1 character from each of the following types: uppercase, lowercase, numeral, special"
-                },
-
-                // Should say each, even if more characteristics set than possible
-                {
-                    2,
-                    "Digit:1",
-                    "At least 1 character from each of the following types: numeral"
-                },
-
-                {
-                    2,
-                    "Digit:2",
-                    "Fufill 2: At least 2 numeral characters"
-                },
-                {
-                    2,
-                    "LowerCase:1,Digit:2,Special:3",
-                    "Fufill 2: At least 1 lowercase characters, 2 numeral characters, 3 special characters"
-                },
-
-                // letter is mentioned even though that configuration is discouraged
-                {
-                    2,
-                    "UpperCase:1,LowerCase:1,Digit:1,Special:1,Alphabetical:1",
-                    "At least 1 character from 2 of the following types: uppercase, lowercase, letter, numeral, special"
-                }
-            });
-        }
-
-        @Test
-        public void testGetRequiredCharacters() {
-            List<CharacterRule> characterRules;
-            String message = "Character rules string for ";
-            if (characterRulesConfigString != null) {
-                characterRules = PasswordValidatorUtil.getCharacterRules(characterRulesConfigString);
-                message += characterRulesConfigString;
-            } else {
-                characterRules = PasswordValidatorUtil.getCharacterRulesDefault();
-                message += "default";
-            }
-
-            String reqString = PasswordValidatorUtil.getRequiredCharacters(characterRules, numberOfCharacteristics);
-            assertEquals(message + ": " + reqString, expectedValue, reqString);
+    static Stream<Arguments> configurations() {
+        return Stream.of(
+            Arguments.of(2, null,
+                "At least 1 character from each of the following types: letter, numeral"),
+            Arguments.of(2, "UpperCase:1,LowerCase:1,Digit:1,Special:1",
+                "At least 1 character from 2 of the following types: uppercase, lowercase, numeral, special"),
+            Arguments.of(4, "UpperCase:1,LowerCase:1,Digit:1,Special:1",
+                "At least 1 character from each of the following types: uppercase, lowercase, numeral, special"),
+            // Should say each, even if more characteristics set than possible
+            Arguments.of(2, "Digit:1", "At least 1 character from each of the following types: numeral"),
+            Arguments.of(2, "Digit:2", "Fufill 2: At least 2 numeral characters"),
+            Arguments.of(2, "LowerCase:1,Digit:2,Special:3",
+                "Fufill 2: At least 1 lowercase characters, 2 numeral characters, 3 special characters"),
+            // letter is mentioned even though that configuration is discouraged
+            Arguments.of(2, "UpperCase:1,LowerCase:1,Digit:1,Special:1,Alphabetical:1",
+                "At least 1 character from 2 of the following types: uppercase, lowercase, letter, numeral, special")
+        );
+    }
+    @ParameterizedTest
+    @MethodSource("configurations")
+    void testGetRequiredCharacters(int numberOfCharacteristics, String characterRulesConfigString, String expectedValue) {
+        List<CharacterRule> characterRules;
+        String message = "Character rules string for ";
+        if (characterRulesConfigString != null) {
+            characterRules = PasswordValidatorUtil.getCharacterRules(characterRulesConfigString);
+            message += characterRulesConfigString;
+        } else {
+            characterRules = PasswordValidatorUtil.getCharacterRulesDefault();
+            message += "default";
         }
+        
+        String reqString = PasswordValidatorUtil.getRequiredCharacters(characterRules, numberOfCharacteristics);
+        assertEquals(expectedValue, reqString, message + ": " + reqString);
     }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java
index 3fe8501bbbf..8c29b609c9b 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/URLValidatorTest.java
@@ -1,8 +1,8 @@
 package edu.harvard.iq.dataverse.validation;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
diff --git a/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java b/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java
index a9816f81dca..1e5f0ca0371 100644
--- a/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/validation/UserNameValidatorTest.java
@@ -7,9 +7,9 @@
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import javax.validation.ConstraintViolation;
-import javax.validation.Validation;
-import javax.validation.Validator;
+import jakarta.validation.ConstraintViolation;
+import jakarta.validation.Validation;
+import jakarta.validation.Validator;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
diff --git a/src/test/resources/META-INF/microprofile-config.properties b/src/test/resources/META-INF/microprofile-config.properties
new file mode 100644
index 00000000000..113a098a1fe
--- /dev/null
+++ b/src/test/resources/META-INF/microprofile-config.properties
@@ -0,0 +1,18 @@
+# DEFAULTS FOR TESTS
+# Unlike src/main/resources/META-INF/microprofile-config.properties, this file will not be included in
+# a packaged WAR. It can be used to provide sane defaults for things like unit tests on classes requiring
+# some sort of configuration.
+
+# PersistentIdentifierServiceBeanTest loads all the providers, which makes the EZID provider reach out
+# to the service - switching to example.org so that test executions do not effectively DDoS their service.
+dataverse.pid.ezid.api-url=http://example.org
+# The provider also requires a username and password in production; use defaults for unit testing.
+dataverse.pid.ezid.username=Dataverse Unit Test
+dataverse.pid.ezid.password=supersecret
+
+# To test ConfigCheckService, point our files directories to a common test dir by overriding the
+# property test.filesDir via system properties
+test.filesDir=/tmp/dataverse
+dataverse.files.directory=${test.filesDir}
+dataverse.files.uploads=${test.filesDir}/uploads
+dataverse.files.docroot=${test.filesDir}/docroot
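
For orientation, a minimal sketch of how code on the test classpath would see these defaults, assuming the standard MicroProfile Config API (ConfigProvider) is available at test runtime; the class name is illustrative and not part of the patch.

import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;

// Illustrative only; not part of the patch.
public class TestConfigDefaultsDemo {
    public static void main(String[] args) {
        Config config = ConfigProvider.getConfig();
        // With the defaults above, the ${test.filesDir} expression resolves to /tmp/dataverse,
        // unless test.filesDir is overridden, e.g. via -Dtest.filesDir=/some/other/dir.
        String filesDir = config.getValue("dataverse.files.directory", String.class);
        System.out.println(filesDir);
    }
}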
diff --git a/src/test/resources/fits/FOSy19g0309t_c2f.fits b/src/test/resources/fits/FOSy19g0309t_c2f.fits
new file mode 100644
index 00000000000..bc76e4066ef
--- /dev/null
+++ b/src/test/resources/fits/FOSy19g0309t_c2f.fits
@@ -0,0 +1,42 @@
+SIMPLE  =                    T / Standard FITS format                           BITPIX  =                  -32 / 32 bit IEEE floating point numbers             NAXIS   =                    2 / Number of axes                                 NAXIS1  =                 2064 /                                                NAXIS2  =                    2 /                                                EXTEND  =                    T / There may be standard extensions               OPSIZE  =                  832 / PSIZE of original image                        ORIGIN  = 'ST-DADS '           / Institution that originated the FITS file      FITSDATE= '12/07/94'           / Date FITS file was created                     FILENAME= 'y19g0309t_cvt.c2h'  / Original GEIS header file name with _cvt       ODATTYPE= 'FLOATING'           / Original datatype                              SDASMGNU=                    2 / GCOUNT of original image                       DADSFILE= 'Y19G0309T.C2F'      /                                                DADSCLAS= 'CAL     '           /                                                DADSDATE= '12-JUL-1994 02:44:39' /                                              CRVAL1  =      1.0000000000000 /                                                CRPIX1  =      1.0000000000000 /                                                CD1_1   =      1.0000000000000 /                                                DATAMIN =     0.00000000000000 /                                                DATAMAX =  2.7387550387959E-15 /                                                RA_APER =      182.63573015260 /                                                DEC_APER=      39.405888372580 /                                                FILLCNT =                    0 /                                                ERRCNT  =                    0 /                                                FPKTTIME=      49099.133531036 /                                                LPKTTIME=      49099.133541164 /                                                CTYPE1  = 'PIXEL   '           /                                                APER_POS= 'SINGLE  '           /                                                PASS_DIR=                    0 /                                                YPOS    =     -1516.0000000000 /                                                YTYPE   = 'OBJ     '           /                                                EXPOSURE=      31.249689102173 /                                                X_OFFSET=     0.00000000000000 /                                                Y_OFFSET=     0.00000000000000 /                                                                                                                                                 / GROUP PARAMETERS: OSS                                                                                                                                         / GROUP PARAMETERS: PODPS                                                                                                                                       / FOS DATA DESCRIPTOR KEYWORDS                                 INSTRUME= 'FOS               ' / instrument in use                              ROOTNAME= 'Y19G0309T         ' / rootname of the observation set                FILETYPE= 'ERR     '           / file type                                      BUNIT   = 'ERGS/CM**2/S/A'     / brightness units                                                                 
                                                               / GENERIC CONVERSION KEYWORDS                                  HEADER  =                    T / science header line exists                     TRAILER =                    F / reject array exists                            YWRDSLIN=                  516 / science words per packet                       YLINSFRM=                    5 / packets per frame                                                                                                                               / CALIBRATION FLAGS AND INDICATORS                             GRNDMODE= 'SPECTROSCOPY      ' / ground software mode                           DETECTOR= 'AMBER             ' / detector in use: amber, blue                   APER_ID = 'B-2               ' / aperture id                                    POLAR_ID= 'C                 ' / polarizer id                                   POLANG  =        0.0000000E+00 / initial angular position of polarizer          FGWA_ID = 'H57               ' / FGWA id                                        FCHNL   =                    0 / first channel                                  NCHNLS  =                  512 / number of channels                             OVERSCAN=                    5 / overscan number                                NXSTEPS =                    4 / number of x steps                              YFGIMPEN=                    T / onboard GIMP correction enabled (T/F)          YFGIMPER= 'NO                ' / error in onboard GIMP correction (YES/NO)                                                                                                       / CALIBRATION REFERENCE FILES AND TABLES                       DEFDDTBL=                    F / UDL disabled diode table used                  BACHFILE= 'yref$b3m1128fy.r0h' / background header file                         FL1HFILE= 'yref$baf13103y.r1h' / first flat-field header file                   FL2HFILE= 'yref$n/a          ' / second flat-field header file                  IV1HFILE= 'yref$c3u13412y.r2h' / first inverse sensitivity header file          IV2HFILE= 'yref$n/a          ' / second inverse sensitivity header file         RETHFILE= 'yref$n/a          ' / waveplate retardation header file              DDTHFILE= 'yref$c861559ay.r4h' / disabled diode table header file               DQ1HFILE= 'yref$b2f1301qy.r5h' / first data quality initialization header file  DQ2HFILE= 'yref$n/a          ' / second data quality initialization header file CCG2    = 'mtab$a3d1145ly.cmg' / paired pulse correction parameters             CCS0    = 'ytab$a3d1145dy.cy0' / aperture parameters                            CCS1    = 'ytab$aaj0732ay.cy1' / aperture position parameters                   CCS2    = 'ytab$a3d1145fy.cy2' / sky emission line regions                      CCS3    = 'ytab$a3d1145gy.cy3' / big and sky filter widths and prism X0         CCS4    = 'ytab$b9d1019my.cy4' / polarimetry parameters                         CCS5    = 'ytab$a3d1145jy.cy5' / sky shifts                                     CCS6    = 'ytab$bck10546y.cy6' / wavelength coefficients                        CCS7    = 'ytab$ba910502y.cy7' / GIMP correction scale factores                 CCS8    = 'ytab$ba31407ly.cy8' / predicted background count rates                                                                                                                / CALIBRATION SWITCHES                                         CNT_CORR= 'COMPLETE'           / count to count rate conversion      
           OFF_CORR= 'OMIT              ' / GIMP correction                                PPC_CORR= 'COMPLETE'           / paired pulse correction                        BAC_CORR= 'COMPLETE'           / background subtraction                         GMF_CORR= 'COMPLETE'           / scale reference background                     FLT_CORR= 'COMPLETE'           / flat-fielding                                  SKY_CORR= 'COMPLETE'           / sky subtraction                                WAV_CORR= 'COMPLETE'           / wavelength scale generation                    FLX_CORR= 'COMPLETE'           / flux scale generation                          ERR_CORR= 'COMPLETE'           / propagated error computation                   MOD_CORR= 'OMIT              ' / ground software mode dependent reductions                                                                                                       / PATTERN KEYWORDS                                             INTS    =                    2 / number of integrations                         YBASE   =                -1516 / y base                                         YRANGE  =                    0 / y range                                        YSTEPS  =                    1 / number of y steps                              YSPACE  =        0.0000000E+00 / yrange * 32 / ysteps                           SLICES  =                    1 / number of time slices                          NPAT    =                   12 / number of patterns per readout                 NREAD   =                    2 / number of readouts per memory clear            NMCLEARS=                    1 / number of memory clears per acquisition        YSTEP1  = 'OBJ               ' / first ystep data type: OBJ, SKY, BCK, NUL      YSTEP2  = 'NUL               ' / second ystep data type: OBJ, SKY, BCK, NUL     YSTEP3  = 'NUL               ' / third ystep data type: OBJ, SKY, BCK, NUL      XBASE   =                    0 / X-deflection base                              XPITCH  =                 1521 / X-deflection pitch between diode               YPITCH  =                 1834 / Y-deflection pitch                                                                                                                              / CALIBRATION KEYWORDS                                         LIVETIME=                33333 / accumulator open time  (unit=7.8125 microsec)  DEADTIME=                 1280 / accumulator close time (unit=7.8125 microsec)  MAXCLK  =                    0 / maximum clock count                            PA_APER =        0.2462417E+03 / position ang of aperture used with target (deg)NOISELM =                65535 / burst noise rejection limit                    OFFS_TAB= 'n/a               ' / GIMP offsets (post-pipeline processing only)   MINWAVE =             4569.102 / minimum wavelength (angstroms)                 MAXWAVE =             6817.517 / maximum wavelength (angstroms)                                                                                                                  / STATISTICAL KEYWORDS                                         DATE    = '22/04/93          ' / date this file was written (dd/mm/yy)          PKTFMT  =                   96 / packet format code                             PODPSFF = '0                 ' / 0=(no podps fill), 1=(podps fill present)      STDCFFF = '0                 ' / 0=(no st dcf fill), 1=(st dcf fill present)    STDCFFP = '0000              ' / st dcf fill pattern (hex)                                              
                                                                         / APERTURE POSITION                                            RA_APER1=  0.1826357301526E+03 / right ascension of the aperture (deg)          DECAPER1=  0.3940588837258E+02 / declination of the aperture (deg)                                                                                                               / EXPOSURE INFORMATION                                         EQUINOX = 'J2000             ' / equinox of the celestial coordinate system     SUNANGLE=        0.1225114E+03 / angle between sun and V1 axis (deg)            MOONANGL=        0.1191039E+03 / angle between moon and V1 axis (deg)           SUN_ALT =        0.4515910E+02 / altitude of the sun above Earth's limb (deg)   FGSLOCK = 'COARSE            ' / commanded FGS lock (FINE,COARSE,GYROS,UNKNOWN)                                                                                 DATE-OBS= '22/04/93          ' / UT date of start of observation (dd/mm/yy)     TIME-OBS= '03:12:17          ' / UT time of start of observation (hh:mm:ss)     EXPSTART=  0.4909913202874E+05 / exposure start time (Modified Julian Date)     EXPEND  =  0.4909913505303E+05 / exposure end time (Modified Julian Date)       EXPTIME =        0.2499975E+03 / exposure duration (seconds)--calculated        EXPFLAG = 'NORMAL            ' / Exposure interruption indicator                                                                                                                 / TARGET & PROPOSAL ID                                         TARGNAME= 'NGC4151-CLOUD2    ' / proposer's target name                         RA_TARG =  0.1826357301526E+03 / right ascension of the target (deg) (J2000)    DEC_TARG=  0.3940588837258E+02 / declination of the target (deg) (J2000)                                                                                        PROPOSID=                 4220 / PEP proposal identifier                        PEP_EXPO= '174.0000000       ' / PEP exposure identifier including sequence     LINENUM = '174.000           ' / PEP proposal line number                       SEQLINE = '                  ' / PEP line number of defined sequence            SEQNAME = '                  ' / PEP define/use sequence name                                                                                                   END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                        
[... unrenderable binary FITS science data from the test fixture omitted ...]
%Œ}H%˜¬%“>1%Ì %¡î%Ÿú%š×|%’±%‘í8%“f¾%tê#%˜AÀ%—ö§%]µ%–=ñ%•Žø%……%«>¼%¬òÝ%¢Ž*%ÚM%•€%’k;%•_ß%¸ðñ%¥µO%¢$+%š#$%R§%¢g%¢@o%ˇ¯%þ?”&Ë&?4&”’&‚&4³*&y#&$ë5������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������XTENSION= 'TABLE   '           / Table extension                                BITPIX  =                    8 / Printable ASCII characters                     NAXIS   =                    2 / Simple 2-D matrix                              NAXIS1  =                  336 / Number of characters per row                   NAXIS2  =                    2 / GCOUNT of original file                        PCOUNT  =                    0 / No random parameter                            GCOUNT  =                    1 / Only one group                                 TFIELDS =                   19 / PCOUNT of original file                        EXTNAME = 'y19g0309t.c2h.tab'  / GEIS header file name with .tab                TTYPE1  = 'CRVAL1  '           /                                                CRVAL1  = 'pixel number'       /                                                TFORM1  = 'D25.16  '           /                                                TBCOL1  =                    1 /                                                TTYPE2  = 'CRPIX1  '           /                                                CRPIX1  = 'pixel number of reference pixel' /                                   TFORM2  = 'E15.7   '           /                                                TBCOL2  =                   29 /                                                TTYPE3  = 'CD1_1   '           /                                                CD1_1   = 'pixel increment'    /                                                TFORM3  = 'E15.7   '           /                                                TBCOL3  =                   45 /                                                TTYPE4  = 'DATAMIN '           /                                                DATAMIN = 'the minimum value of the data' /                                     TFORM4  = 'E15.7   '           /                                                TBCOL4  =                   61 /                                                TTYPE5  = 'DATAMAX '           /                                                DATAMAX = 'the maximum value of the data' /                                     TFORM5  = 'E15.7   '           /                                                TBCOL5  =                   77 /                                                TTYPE6  = 'RA_APER '           /                                                RA_APER = 'right ascension of aperture (deg)' /                                 TFORM6  = 'D25.16  '           /                                                TBCOL6  =                   93 /                                                
TTYPE7  = 'DEC_APER'           /                                                DEC_APER= 'declination of aperture (deg)' /                                     TFORM7  = 'D25.16  '           /                                                TBCOL7  =                  121 /                                                TTYPE8  = 'FILLCNT '           /                                                FILLCNT = 'number of segments containing fill' /                                TFORM8  = 'I11     '           /                                                TBCOL8  =                  149 /                                                TTYPE9  = 'ERRCNT  '           /                                                ERRCNT  = 'the error count of the data' /                                       TFORM9  = 'I11     '           /                                                TBCOL9  =                  161 /                                                TTYPE10 = 'FPKTTIME'           /                                                FPKTTIME= 'the time of the first packet' /                                      TFORM10 = 'D25.16  '           /                                                TBCOL10 =                  173 /                                                TTYPE11 = 'LPKTTIME'           /                                                LPKTTIME= 'the time of the last packet' /                                       TFORM11 = 'D25.16  '           /                                                TBCOL11 =                  201 /                                                TTYPE12 = 'CTYPE1  '           /                                                CTYPE1  = 'the first coordinate type' /                                         TFORM12 = 'A8      '           /                                                TBCOL12 =                  229 /                                                TTYPE13 = 'APER_POS'           /                                                APER_POS= 'aperture used'      /                                                TFORM13 = 'A8      '           /                                                TBCOL13 =                  241 /                                                TTYPE14 = 'PASS_DIR'           /                                                PASS_DIR= 'polarization pass direction' /                                       TFORM14 = 'I11     '           /                                                TBCOL14 =                  253 /                                                TTYPE15 = 'YPOS    '           /                                                YPOS    = 'y-position on photocathode' /                                        TFORM15 = 'E15.7   '           /                                                TBCOL15 =                  265 /                                                TTYPE16 = 'YTYPE   '           /                                                YTYPE   = 'observation type: OBJ, SKY, BCK' /                                   TFORM16 = 'A4      '           /                                                TBCOL16 =                  281 /                                                TTYPE17 = 'EXPOSURE'           /                                                EXPOSURE= 'exposure time per pixel (seconds)' /                                 TFORM17 = 'E15.7   '           /                                                TBCOL17 =                  289 /                                                TTYPE18 = 'X_OFFSET'           /   
                                             X_OFFSET= 'x_offset for GIMP correction (diodes)' /                             TFORM18 = 'E15.7   '           /                                                TBCOL18 =                  305 /                                                TTYPE19 = 'Y_OFFSET'           /                                                Y_OFFSET= 'y_offset for GIMP correction (defl.units)' /                         TFORM19 = 'E15.7   '           /                                                TBCOL19 =                  321 /                                                END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                1.0000000000000000E+00     1.0000000E+00   1.0000000E+00   0.0000000E+00   2.7387550E-15    1.8263573015259999E+02      3.9405888372579994E+01             0           0    4.9099133531036357E+04      4.9099133541163668E+04   PIXEL       SINGLE                0  -1.5160000E+03 OBJ       3.1249689E+01   0.0000000E+00   0.0000000E+00    1.0000000000000000E+00     1.0000000E+00   1.0000000E+00   0.0000000E+00   1.9348280E-15    1.8263573015259999E+02      3.9405888372579994E+01             0           0    4.9099135042899798E+04      4.9099135053027116E+04   PIXEL       SINGLE                0  -1.5160000E+03 OBJ       6.2499371E+01   0.0000000E+00   0.0000000E+00                                                                                                                                                                                                                                                                                                                                                                                                                                                       
                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                          
\ No newline at end of file
diff --git a/src/test/resources/json/complete-dataset-with-files.json b/src/test/resources/json/complete-dataset-with-files.json
index 2476c8e3356..4fd4e243aba 100644
--- a/src/test/resources/json/complete-dataset-with-files.json
+++ b/src/test/resources/json/complete-dataset-with-files.json
@@ -1,5 +1,9 @@
 {
     "datasetVersion": {
+      "license": {
+        "name": "CC0 1.0",
+        "uri": "http://creativecommons.org/publicdomain/zero/1.0"
+      },
       "metadataBlocks": {
         "citation": {
           "displayName": "Citation Metadata",
diff --git a/src/test/resources/json/dataset-finch1.json b/src/test/resources/json/dataset-finch1.json
index 290e800a9ef..54ec8a3e383 100644
--- a/src/test/resources/json/dataset-finch1.json
+++ b/src/test/resources/json/dataset-finch1.json
@@ -69,4 +69,4 @@
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/src/test/resources/netcdf/ICOADS_R3.0.0_1662-10.nc b/src/test/resources/netcdf/ICOADS_R3.0.0_1662-10.nc
new file mode 100644
index 00000000000..30117b7b455
Binary files /dev/null and b/src/test/resources/netcdf/ICOADS_R3.0.0_1662-10.nc differ
diff --git a/src/test/resources/tab/test.tab b/src/test/resources/tab/test.tab
new file mode 100644
index 00000000000..d750d42d995
--- /dev/null
+++ b/src/test/resources/tab/test.tab
@@ -0,0 +1,11 @@
+position	 name	 age
+1	"Belle"	36
+2	"Lola"	37
+3	"Jayden"	45
+4	"Margaret"	37
+5	"Russell"	40
+6	"Bertie"	60
+7	"Maud"	34
+8	"Mabel"	31
+9	"Trevor"	51
+10	"Duane"	26
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-attribs-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-attribs-1.xsd
new file mode 100644
index 00000000000..9c015c04a57
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-attribs-1.xsd
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      This is the XML Schema common attributes module for XHTML
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+    <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_commonatts"/>
+  </xs:annotation>
+
+  <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="../xml.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        This import brings in the XML namespace attributes
+        The module itself does not provide the schemaLocation
+        and expects the driver schema to provide the
+        actual SchemaLocation.
+      </xs:documentation>
+    </xs:annotation>
+  </xs:import>
+
+  <xs:attributeGroup name="id">
+    <xs:attribute name="id" type="xs:ID"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="class">
+    <xs:attribute name="class" type="xs:NMTOKENS"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="title">
+    <xs:attribute name="title" type="xs:string"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Core.attrib">
+    <xs:attributeGroup ref="id"/>
+    <xs:attributeGroup ref="class"/>
+    <xs:attributeGroup ref="title"/>
+    <xs:attributeGroup ref="Core.extra.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="I18n.attrib">
+    <xs:attribute ref="xml:lang"/>
+    <xs:attributeGroup ref="I18n.extra.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Common.attrib">
+    <xs:attributeGroup ref="Core.attrib"/>
+    <xs:attributeGroup ref="I18n.attrib"/>
+    <xs:attributeGroup ref="Common.extra"/>
+  </xs:attributeGroup>
+
+
+  <!-- Global attributes -->
+  <xs:attribute name="id" type="xs:ID"/>
+  <xs:attribute name="class" type="xs:NMTOKENS"/>
+  <xs:attribute name="title" type="xs:string"/>
+
+  <xs:attributeGroup name="Global.core.attrib">
+    <xs:attribute ref="id"/>
+    <xs:attribute ref="class"/>
+    <xs:attribute ref="title"/>
+    <xs:attributeGroup ref="Global.core.extra.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Global.i18n.attrib">
+    <xs:attribute ref="xml:lang"/>
+    <xs:attributeGroup ref="Global.I18n.extra.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Global.common.attrib">
+    <xs:attributeGroup ref="Global.core.attrib"/>
+    <xs:attributeGroup ref="Global.i18n.attrib"/>
+    <xs:attributeGroup ref="Global.Common.extra"/>
+  </xs:attributeGroup>
+
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-bdo-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-bdo-1.xsd
new file mode 100644
index 00000000000..94290ffcc44
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-bdo-1.xsd
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml"
+            xmlns="http://www.w3.org/1999/xhtml"
+            xmlns:xs="http://www.w3.org/2001/XMLSchema">
+    <xs:annotation>
+       <xs:documentation>
+          Bidirectional Override (bdo) Element
+          This is the XML Schema BDO Element module for XHTML
+
+          This modules declares the element 'bdo' and 'dir' attributes,
+          Used to override the  Unicode bidirectional algorithm for selected
+          fragments of text.
+          Bidirectional text support includes both the bdo element and
+          the 'dir' attribute.
+
+          $Id$
+      </xs:documentation>
+      <xs:documentation source="xhtml-copyright-1.xsd"/>
+      <xs:documentation
+         source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_bdomodule"/>
+    </xs:annotation>
+
+    <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="../xml.xsd">
+      <xs:annotation>
+         <xs:documentation>
+           This import brings in the XML namespace attributes
+           The module itself does not provide the schemaLocation
+           and expects the driver schema to provide the
+           actual SchemaLocation.
+         </xs:documentation>
+      </xs:annotation>
+    </xs:import>
+
+    <xs:attributeGroup name="bdo.attlist">
+      <xs:attribute ref="xml:lang"/>
+      <xs:attributeGroup ref="Core.attrib"/>
+      <xs:attribute name="dir" use="required">
+        <xs:simpleType>
+          <xs:restriction base="xs:NMTOKEN">
+            <xs:enumeration value="ltr"/>
+            <xs:enumeration value="rtl"/>
+          </xs:restriction>
+        </xs:simpleType>
+      </xs:attribute>
+    </xs:attributeGroup>
+
+    <xs:group name="bdo.content">
+       <xs:sequence>
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+       </xs:sequence>
+    </xs:group>
+
+    <xs:complexType name="bdo.type" mixed="true">
+      <xs:group ref="bdo.content"/>
+      <xs:attributeGroup ref="bdo.attlist"/>
+    </xs:complexType>
+
+    <xs:element name="bdo" type="bdo.type"/>
+
+    <xs:attributeGroup name="dir.attrib">
+      <xs:attribute name="dir">
+        <xs:simpleType>
+          <xs:restriction base="xs:NMTOKEN">
+            <xs:enumeration value="ltr"/>
+            <xs:enumeration value="rtl"/>
+          </xs:restriction>
+        </xs:simpleType>
+      </xs:attribute>
+    </xs:attributeGroup>
+
+    <!-- Global dir attribute -->
+    <xs:attribute name="dir">
+      <xs:simpleType>
+        <xs:restriction base="xs:NMTOKEN">
+          <xs:enumeration value="ltr"/>
+          <xs:enumeration value="rtl"/>
+        </xs:restriction>
+      </xs:simpleType>
+    </xs:attribute>
+
+    <xs:attributeGroup name="Global.bdo.attrib">
+       <xs:attribute ref="dir"/>
+    </xs:attributeGroup>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkphras-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkphras-1.xsd
new file mode 100644
index 00000000000..feaaaa83922
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkphras-1.xsd
@@ -0,0 +1,206 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml" 
+           xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           xmlns="http://www.w3.org/1999/xhtml">
+      
+      
+    <xs:annotation>
+       <xs:documentation>
+          This is the XML Schema Block Phrasal support module for XHTML
+          $Id$
+       </xs:documentation>
+       <xs:documentation source="xhtml-copyright-1.xsd"/>
+    </xs:annotation>
+    
+    <xs:annotation>
+        <xs:documentation>
+           Block Phrasal
+           This module declares the elements and their attributes used to
+           support block-level phrasal markup.
+           This is the XML Schema block phrasal elements module for XHTML
+
+            * address, blockquote, pre, h1, h2, h3, h4, h5, h6
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_textmodule"/>
+    </xs:annotation>
+    
+    <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="../xml.xsd"> 
+      <xs:annotation>
+        <xs:documentation>
+          This import brings in the XML namespace attributes 
+          The module itself does not provide the schemaLocation
+          and expects the driver schema to provide the 
+          actual SchemaLocation.
+        </xs:documentation>
+      </xs:annotation>
+    </xs:import>    
+    
+    <!-- address -->
+    <xs:attributeGroup name="address.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+   
+    <xs:group name="address.content">
+       <xs:sequence>
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>       
+       </xs:sequence>
+    </xs:group>
+        
+    <xs:complexType name="address.type" mixed="true">
+        <xs:group ref="address.content"/>      
+        <xs:attributeGroup ref="address.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="address" type="address.type"/>
+    
+    <!-- blockquote -->
+    <xs:attributeGroup name="blockquote.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attribute name="cite" type="URI"/>
+    </xs:attributeGroup>
+
+    <xs:group name="blockquote.content">
+       <xs:sequence>
+          <xs:group ref="Block.mix" maxOccurs="unbounded"/>       
+       </xs:sequence>
+    </xs:group>        
+    
+    <xs:complexType name="blockquote.type">
+        <xs:group ref="blockquote.content"/> 
+        <xs:attributeGroup ref="blockquote.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="blockquote" type="blockquote.type"/>
+    
+    <!-- pre -->
+    <xs:attributeGroup name="pre.attlist">
+        <xs:attribute ref="xml:space"/>
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="pre.content">
+       <xs:sequence>    
+          <xs:group ref="InlinePre.mix" minOccurs="0" maxOccurs="unbounded"/>       
+       </xs:sequence>
+    </xs:group>        
+        
+    <xs:complexType name="pre.type" mixed="true">
+        <xs:group ref="pre.content"/>     
+        <xs:attributeGroup ref="pre.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="pre" type="pre.type"/>
+    
+    <!-- Heading Elements  -->
+    <xs:attributeGroup name="heading.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:complexType name="heading.type" mixed="true">
+        <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+        <xs:attributeGroup ref="heading.attlist"/>
+    </xs:complexType>
+    
+    <xs:attributeGroup name="h1.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="h1.content">
+       <xs:sequence>    
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>    
+       </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="h1.type" mixed="true">
+        <xs:group ref="h1.content"/>
+        <xs:attributeGroup ref="h1.attlist"/>
+    </xs:complexType>
+       
+    <xs:element name="h1" type="h1.type"/>
+    
+    <xs:attributeGroup name="h2.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="h2.content">
+       <xs:sequence>    
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>    
+       </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="h2.type" mixed="true">
+        <xs:group ref="h2.content"/>
+        <xs:attributeGroup ref="h2.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="h2" type="h2.type"/>
+    
+    <xs:attributeGroup name="h3.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="h3.content">
+       <xs:sequence>    
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>    
+       </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="h3.type" mixed="true">
+        <xs:group ref="h3.content"/>
+        <xs:attributeGroup ref="h3.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="h3" type="h3.type"/>
+    
+    <xs:attributeGroup name="h4.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="h4.content">
+       <xs:sequence>    
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>    
+       </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="h4.type" mixed="true">
+        <xs:group ref="h4.content"/>
+        <xs:attributeGroup ref="h4.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="h4" type="h4.type"/>
+    
+    <xs:attributeGroup name="h5.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="h5.content">
+       <xs:sequence>    
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>    
+       </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="h5.type" mixed="true">
+        <xs:group ref="h5.content"/>
+        <xs:attributeGroup ref="h5.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="h5" type="h5.type"/>
+
+    <xs:attributeGroup name="h6.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="h6.content">
+       <xs:sequence>    
+          <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>    
+       </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="h6.type" mixed="true">
+        <xs:group ref="h6.content"/>
+        <xs:attributeGroup ref="h6.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="h6" type="h6.type"/>
+    
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkpres-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkpres-1.xsd
new file mode 100644
index 00000000000..632b7a85792
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkpres-1.xsd
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      This is the XML SchemaBlock presentation element module for XHTML
+      $Id$
+    </xs:documentation>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      Block Presentational Elements
+  
+        * hr
+  
+      This module declares the elements and their attributes used to
+      support block-level presentational markup.
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+    <xs:documentation 
+         source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_presentationmodule"/>    
+  </xs:annotation>
+
+  <xs:attributeGroup name="hr.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="hr.content">
+    <xs:sequence/>
+  </xs:group>  
+  
+  <xs:complexType name="hr.type">
+    <xs:group ref="hr.content"/>
+    <xs:attributeGroup ref="hr.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="hr" type="hr.type"/>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkstruct-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkstruct-1.xsd
new file mode 100644
index 00000000000..d4e4afb72d5
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-blkstruct-1.xsd
@@ -0,0 +1,57 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      Block Structural
+
+        * div, p
+  
+      This module declares the elements and their attributes used to
+      support block-level structural markup.            
+          
+      This is the XML Schema Block Structural module for XHTML
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <!-- div -->
+  <xs:attributeGroup name="div.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="div.content">
+     <xs:sequence>
+        <xs:group ref="Flow.mix" minOccurs="0" maxOccurs="unbounded"/>     
+     </xs:sequence>
+  </xs:group>    
+
+  <xs:complexType name="div.type" mixed="true">
+    <xs:group ref="div.content"/>     
+    <xs:attributeGroup ref="div.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="div" type="div.type"/>
+
+  <!-- p -->
+  <xs:attributeGroup name="p.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="p.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>     
+     </xs:sequence>
+  </xs:group>      
+
+  <xs:complexType name="p.type" mixed="true">
+    <xs:group ref="p.content"/>       
+    <xs:attributeGroup ref="p.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="p" type="p.type"/>
+
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-charent-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-charent-1.xsd
new file mode 100644
index 00000000000..438f77fc7b1
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-charent-1.xsd
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  This schema module includes three named character entity files.
+-->
+<!DOCTYPE xs:schema [
+<!-- These are the entity sets for ISO Latin 1 characters for the XHTML -->
+<!ENTITY % HTMLlat1 PUBLIC
+   "-//W3C//ENTITIES Latin 1 for XHTML//EN"
+   "xhtml-lat1.ent">
+%HTMLlat1;
+<!-- These are the entity sets for special characters for the XHTML -->
+<!ENTITY % HTMLsymbol PUBLIC
+   "-//W3C//ENTITIES Symbols for XHTML//EN"
+   "xhtml-symbol.ent">
+%HTMLsymbol;
+<!-- These are the entity sets for symbol characters for the XHTML -->
+<!ENTITY % HTMLspecial PUBLIC
+   "-//W3C//ENTITIES Special for XHTML//EN"
+   "xhtml-special.ent">
+%HTMLspecial;
+]>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      Character Entities for XHTML    
+      This is the XML Schema Character Entities module for XHTML
+
+      This module declares the set of character entities for XHTML,
+      including the Latin 1, Symbol and Special character collections.
+      XML Schema does not support Entities, hence Entities are enable
+      through an Internal DTD Subset.
+      
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-datatypes-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-datatypes-1.xsd
new file mode 100644
index 00000000000..8509d396809
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-datatypes-1.xsd
@@ -0,0 +1,147 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+            targetNamespace="http://www.w3.org/1999/xhtml"
+            xmlns="http://www.w3.org/1999/xhtml">
+
+    <xs:annotation>
+        <xs:documentation>
+          XHTML Datatypes
+          This is the XML Schema datatypes module for XHTML
+          
+          Defines containers for the XHTML datatypes, many of
+          these imported from other specifications and standards.
+          
+          $Id$
+        </xs:documentation>
+        <xs:documentation source="xhtml-copyright-1.xsd"/>
+        <xs:documentation 
+            source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstraction.html#s_common_attrtypes"/>        
+    </xs:annotation>
+    
+    <!-- nn for pixels or nn% for percentage length -->
+    <xs:simpleType name="Length">
+        <xs:union memberTypes="xs:nonNegativeInteger">
+           <xs:simpleType>
+             <xs:restriction base="xs:token">
+               <xs:pattern value="\d+[%]|\d*\.\d+[%]"/>
+              </xs:restriction>
+           </xs:simpleType>         
+        </xs:union>
+    </xs:simpleType>
+    
+    <!-- space-separated list of link types -->
+    <xs:simpleType name="LinkTypes">
+        <xs:list itemType="xs:NMTOKEN"/>
+    </xs:simpleType>
+    
+    <!-- single or comma-separated list of media descriptors -->
+    <xs:simpleType name="MediaDesc">
+        <xs:restriction base="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- pixel, percentage, or relative -->
+    <xs:simpleType name="MultiLength">
+        <xs:union memberTypes="Length">
+           <xs:simpleType>
+             <xs:restriction base="xs:token">
+               <xs:pattern value="\d*\*"/>
+             </xs:restriction>
+           </xs:simpleType>         
+        </xs:union>
+    </xs:simpleType>
+    
+    <!-- one or more digits (NUMBER) -->
+    <xs:simpleType name="Number">
+        <xs:restriction base="xs:nonNegativeInteger"/>
+    </xs:simpleType>
+    
+    <!-- integer representing length in pixels -->
+    <xs:simpleType name="Pixels">
+        <xs:restriction base="xs:nonNegativeInteger"/>
+    </xs:simpleType>
+    
+    <!-- script expression -->
+    <xs:simpleType name="Script">
+        <xs:restriction base="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- sixteen color names or RGB color expression-->
+    <xs:simpleType name="Color">
+        <xs:union memberTypes="xs:NMTOKEN">    
+           <xs:simpleType>                      
+              <xs:restriction base="xs:token">
+                 <xs:pattern value="#[0-9a-fA-F]{6}"/>
+              </xs:restriction>
+           </xs:simpleType>         
+        </xs:union>
+    </xs:simpleType>
+    
+    <!-- textual content -->
+    <xs:simpleType name="Text">
+        <xs:restriction base="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- Imported Datatypes  -->
+    <!-- a single character, as per section 2.2 of [XML] -->
+    <xs:simpleType name="Character">
+        <xs:restriction base="xs:string">
+           <xs:length value="1" fixed="true"/>
+        </xs:restriction>
+    </xs:simpleType>
+    
+    <!-- a character encoding, as per [RFC2045] -->
+    <xs:simpleType name="Charset">
+        <xs:restriction base="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- a space separated list of character encodings, as per [RFC2045] -->
+    <xs:simpleType name="Charsets">
+        <xs:list itemType="Charset"/>
+    </xs:simpleType>
+    
+    <!-- media type, as per [RFC2045] -->
+    <xs:simpleType name="ContentType">
+        <xs:list itemType="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- comma-separated list of media types, as per [RFC2045] -->
+    <xs:simpleType name="ContentTypes">
+        <xs:list itemType="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- date and time information. ISO date format -->
+    <xs:simpleType name="Datetime">
+        <xs:restriction base="xs:dateTime"/>
+    </xs:simpleType>
+    
+    <!-- formal public identifier, as per [ISO8879] -->
+    <xs:simpleType name="FPI">
+        <xs:restriction base="xs:normalizedString"/>
+    </xs:simpleType>
+    
+    <!-- a language code, as per [RFC3066] -->
+    <xs:simpleType name="LanguageCode">
+        <xs:restriction base="xs:language"/>
+    </xs:simpleType>
+    
+    <!-- a Uniform Resource Identifier, see [URI] -->
+    <xs:simpleType name="URI">
+        <xs:restriction base="xs:anyURI"/>
+    </xs:simpleType>
+    
+    <!-- a space-separated list of Uniform Resource Identifiers, see [URI] -->
+    <xs:simpleType name="URIs">
+        <xs:list itemType="xs:anyURI"/>
+    </xs:simpleType>
+    
+    <!-- comma-separated list of MultiLength -->
+    <xs:simpleType name="MultiLengths">
+        <xs:restriction base="xs:string"/>
+    </xs:simpleType>
+    
+    <!-- character Data -->
+    <xs:simpleType name="CDATA">
+        <xs:restriction base="xs:string"/>
+    </xs:simpleType>
+        
+</xs:schema>
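For orientation only (illustrative, not part of the patch): the unions above admit the following lexical forms; the attributes that actually carry these values are declared in the other XHTML modules.

<!-- Length:       "120" or "80%"          (pixels or percentage)     -->
<!-- MultiLength:  "120", "80%" or "2*"    (adds the relative form)   -->
<!-- Color:        "navy" or "#1A2B3C"     (NMTOKEN name or RGB hex)  -->
<!-- Datetime:     "2003-04-02T10:00:00Z"  (restriction of dateTime)  -->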
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-framework-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-framework-1.xsd
new file mode 100644
index 00000000000..89573569329
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-framework-1.xsd
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      This is the XML Schema Modular Framework support module for XHTML
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      XHTML Modular Framework
+      This required module instantiates the necessary modules
+      needed to support the XHTML modularization framework.
+
+      The Schema modules instantiated are:
+        +  notations
+        +  datatypes
+        +  common attributes
+        +  character entities
+    </xs:documentation>
+    <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_commonatts"/>
+  </xs:annotation>
+
+  <xs:include schemaLocation="xhtml-notations-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+         Notations module
+         Declares XHTML notations for Attribute data types
+      </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-datatypes-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        This module defines XHTML Attribute DataTypes
+      </xs:documentation>
+      <xs:documentation
+          source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstraction.html#s_common_attrtypes"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-attribs-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        This module defines Common attributes for XHTML
+      </xs:documentation>
+      <xs:documentation
+          source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_commonatts"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-charent-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Character entities module
+        Note: Entities are not supported in XML Schema
+        The Schema Module uses DTDs to define Entities
+
+        This module defines
+          + XHTML Latin 1 Character Entities
+          + XHTML Special Characters
+          + XHTML Mathematical, Greek, and Symbolic Characters
+    </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-hypertext-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-hypertext-1.xsd
new file mode 100644
index 00000000000..8d88125ace1
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-hypertext-1.xsd
@@ -0,0 +1,51 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      Hypertext Module
+      This is the XML Schema Hypertext module for XHTML
+            
+        * a
+            
+      This module declares the anchor ('a') element type, which
+      defines the source of a hypertext link. The destination
+      (or link 'target') is identified via its 'id' attribute 
+      rather than the 'name' attribute as was used in HTML.
+
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+    <xs:documentation
+        source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_hypertextmodule"/>    
+  </xs:annotation>
+
+
+  <xs:attributeGroup name="a.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+    <xs:attribute name="href" type="URI"/>
+    <xs:attribute name="charset" type="Charset"/>
+    <xs:attribute name="type" type="ContentType"/>
+    <xs:attribute name="hreflang" type="LanguageCode"/>
+    <xs:attribute name="rel" type="LinkTypes"/>
+    <xs:attribute name="rev" type="LinkTypes"/>
+    <xs:attribute name="accesskey" type="Character"/>
+    <xs:attribute name="tabindex" type="Number"/>
+  </xs:attributeGroup>
+   
+  <xs:group name="a.content">
+     <xs:sequence>
+        <xs:group ref="InlNoAnchor.mix" minOccurs="0" maxOccurs="unbounded"/>     
+     </xs:sequence>
+  </xs:group>  
+
+  <xs:complexType name="a.type" mixed="true">
+     <xs:group ref="a.content"/>
+     <xs:attributeGroup ref="a.attlist"/>
+  </xs:complexType>
+ 
+  <xs:element name="a" type="a.type"/>
+
+</xs:schema>
\ No newline at end of file
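By way of illustration (hypothetical fragment, not part of the patch, and assuming the full module set so that Common.attrib and the datatypes resolve), an anchor carrying attributes from a.attlist could look like:

<a href="http://example.org/doc" hreflang="en" rel="alternate"
   accesskey="d" tabindex="1">example document</a>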
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlphras-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlphras-1.xsd
new file mode 100644
index 00000000000..19a93cf9f71
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlphras-1.xsd
@@ -0,0 +1,220 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+         This is the XML Schema Inline Phrasal support module for XHTML
+         $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      Inline Phrasal.
+      This module declares the elements and their attributes used to
+      support inline-level phrasal markup.
+      This is the XML Schema Inline Phrasal module for XHTML
+
+        * abbr, acronym, cite, code, dfn, em, kbd, q, samp, strong, var
+
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_textmodule"/>
+  </xs:annotation>
+
+
+  <xs:attributeGroup name="abbr.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="abbr.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="abbr.type" mixed="true">
+    <xs:group ref="abbr.content"/>
+    <xs:attributeGroup ref="abbr.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="abbr" type="abbr.type"/>
+
+  <xs:attributeGroup name="acronym.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="acronym.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="acronym.type" mixed="true">
+    <xs:group ref="acronym.content"/>
+    <xs:attributeGroup ref="acronym.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="acronym" type="acronym.type"/>
+
+  <xs:attributeGroup name="cite.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="cite.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="cite.type" mixed="true">
+    <xs:group ref="cite.content"/>
+    <xs:attributeGroup ref="cite.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="cite" type="cite.type"/>
+
+  <xs:attributeGroup name="code.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="code.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="code.type" mixed="true">
+    <xs:group ref="code.content"/>
+    <xs:attributeGroup ref="code.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="code" type="code.type"/>
+
+  <xs:attributeGroup name="dfn.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="dfn.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="dfn.type" mixed="true">
+    <xs:group ref="dfn.content"/>
+    <xs:attributeGroup ref="dfn.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="dfn" type="dfn.type"/>
+
+  <xs:attributeGroup name="em.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="em.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="em.type" mixed="true">
+    <xs:group ref="em.content"/>
+    <xs:attributeGroup ref="em.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="em" type="em.type"/>
+
+
+  <xs:attributeGroup name="kbd.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="kbd.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="kbd.type" mixed="true">
+    <xs:group ref="kbd.content"/>
+    <xs:attributeGroup ref="kbd.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="kbd" type="kbd.type"/>
+
+
+  <xs:attributeGroup name="samp.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="samp.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="samp.type" mixed="true">
+    <xs:group ref="samp.content"/>
+    <xs:attributeGroup ref="samp.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="samp" type="samp.type"/>
+
+
+  <xs:attributeGroup name="strong.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="strong.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="strong.type" mixed="true">
+    <xs:group ref="strong.content"/>
+    <xs:attributeGroup ref="strong.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="strong" type="strong.type"/>
+
+  <xs:attributeGroup name="var.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="var.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="var.type" mixed="true">
+    <xs:group ref="var.content"/>
+    <xs:attributeGroup ref="var.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="var" type="var.type"/>
+
+  <xs:attributeGroup name="q.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+    <xs:attribute name="cite" type="URI"/>
+  </xs:attributeGroup>
+
+  <xs:group name="q.content">
+     <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+     </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="q.type" mixed="true">
+    <xs:group ref="q.content"/>
+    <xs:attributeGroup ref="q.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="q" type="q.type"/>
+
+</xs:schema>
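Illustrative only (not part of the patch): because each phrasal element's content group references Inline.mix, these elements nest freely inside other inline content. A hypothetical fragment, assuming the block modules that declare p are also loaded:

<p>The <abbr>DDI</abbr> codebook stores <em>variable-level</em> metadata;
see <cite>DDI Codebook 2.5</cite> and the <code>codeBook</code> element,
<q cite="http://example.org/spec">as defined by the specification</q>.</p>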
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlpres-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlpres-1.xsd
new file mode 100644
index 00000000000..400a034dcef
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlpres-1.xsd
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      This is the XML Schema Inline Presentation element module for XHTML
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      Inline Presentational Elements
+    
+        * b, big, i, small, sub, sup, tt
+    
+      This module declares the elements and their attributes used to
+      support inline-level presentational markup.
+    </xs:documentation>
+    <xs:documentation 
+         source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_presentationmodule"/>
+  </xs:annotation>
+
+  <xs:attributeGroup name="InlPres.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="InlPres.content">
+    <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:group>  
+    
+  <xs:complexType name="InlPres.type" mixed="true">
+    <xs:group ref="InlPres.content"/>
+    <xs:attributeGroup ref="InlPres.attlist"/>
+  </xs:complexType>
+    
+  <xs:element name="b" type="InlPres.type"/>
+
+  <xs:element name="big" type="InlPres.type"/>
+
+  <xs:element name="i" type="InlPres.type"/>
+
+  <xs:element name="small" type="InlPres.type"/>
+
+  <xs:element name="sub" type="InlPres.type"/>
+
+  <xs:element name="sup" type="InlPres.type"/>
+
+  <xs:element name="tt" type="InlPres.type"/>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlstruct-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlstruct-1.xsd
new file mode 100644
index 00000000000..f53715da6b4
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlstruct-1.xsd
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+
+  <xs:annotation>
+    <xs:documentation>
+         This is the XML Schema Inline Structural support module for XHTML
+         $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      Inline Structural.
+      This module declares the elements and their attributes 
+      used to support inline-level structural markup.      
+      This is the XML Schema Inline Structural element module for XHTML
+
+        * br, span
+      
+    </xs:documentation>
+    <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_textmodule"/>
+  </xs:annotation>
+
+  <xs:attributeGroup name="br.attlist">
+    <xs:attributeGroup ref="Core.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="br.content">
+     <xs:sequence/>
+  </xs:group>  
+
+  <xs:complexType name="br.type">
+    <xs:group ref="br.content"/>    
+    <xs:attributeGroup ref="br.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="br" type="br.type"/>
+
+  <xs:attributeGroup name="span.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="span.content">
+    <xs:sequence>
+       <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>     
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="span.type" mixed="true">
+    <xs:group ref="span.content"/>   
+    <xs:attributeGroup ref="span.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="span" type="span.type"/>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlstyle-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlstyle-1.xsd
new file mode 100644
index 00000000000..b946c3978e7
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-inlstyle-1.xsd
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      Inline Style module    
+      This is the XML Schema Inline Style module for XHTML
+      
+         * style attribute
+
+      This module declares the 'style' attribute, used to support inline 
+      style markup. 
+
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+    <xs:documentation 
+       source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_styleattributemodule"/>    
+  </xs:annotation>
+
+  <xs:attributeGroup name="style.attrib">
+    <xs:attribute name="style" type="CDATA"/>
+  </xs:attributeGroup>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-list-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-list-1.xsd
new file mode 100644
index 00000000000..7503a1fce62
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-list-1.xsd
@@ -0,0 +1,128 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      List Module
+      This is the XML Schema Lists module for XHTML
+      List Module Elements
+    
+        * dl, dt, dd, ol, ul, li
+    
+      This module declares the list-oriented element types
+      and their attributes.
+      $Id$      
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+    <xs:documentation
+      source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_listmodule"/>      
+  </xs:annotation>
+
+  <xs:attributeGroup name="dt.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="dt.content">
+    <xs:sequence>
+      <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="dt.type" mixed="true">
+    <xs:group ref="dt.content"/>
+    <xs:attributeGroup ref="dt.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="dt" type="dt.type"/>
+
+  <xs:attributeGroup name="dd.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="dd.content">
+    <xs:sequence>
+      <xs:group ref="Flow.mix" minOccurs="0" maxOccurs="unbounded"/>  
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="dd.type" mixed="true">
+    <xs:group ref="dd.content"/>
+    <xs:attributeGroup ref="dd.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="dd" type="dd.type"/>
+
+  <xs:attributeGroup name="dl.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="dl.content">
+    <xs:sequence>
+      <xs:choice maxOccurs="unbounded">
+        <xs:element ref="dt"/>
+        <xs:element ref="dd"/>
+      </xs:choice>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="dl.type">
+    <xs:group ref="dl.content"/>
+    <xs:attributeGroup ref="dl.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="dl" type="dl.type"/>
+
+  <xs:attributeGroup name="li.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="li.content">
+    <xs:sequence>
+      <xs:group ref="Flow.mix" minOccurs="0" maxOccurs="unbounded"/>
+    </xs:sequence>      
+  </xs:group>
+
+  <xs:complexType name="li.type" mixed="true">
+    <xs:group ref="li.content"/>  
+    <xs:attributeGroup ref="li.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="li" type="li.type"/>
+
+  <xs:attributeGroup name="ol.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+
+  <xs:group name="ol.content">
+    <xs:sequence>
+      <xs:element ref="li" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="ol.type">
+    <xs:group ref="ol.content"/>  
+    <xs:attributeGroup ref="ol.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="ol" type="ol.type"/>
+
+  <xs:attributeGroup name="ul.attlist">
+    <xs:attributeGroup ref="Common.attrib"/>
+  </xs:attributeGroup>
+  
+  <xs:group name="ul.content">
+    <xs:sequence>
+      <xs:element ref="li" maxOccurs="unbounded"/>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="ul.type">
+    <xs:group ref="ul.content"/>    
+    <xs:attributeGroup ref="ul.attlist"/>
+  </xs:complexType>
+
+  <xs:element name="ul" type="ul.type"/>
+
+</xs:schema>
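A hypothetical fragment (not part of the patch) exercising the content models above — dl requires one or more dt/dd children, ol and ul require at least one li, and dd/li accept Flow.mix so block content may nest inside them:

<dl>
  <dt>codeBook</dt>
  <dd>The root element of a DDI 2.5 instance.</dd>
</dl>
<ul>
  <li>ordered and unordered lists require at least one li</li>
  <li>dd and li accept Flow.mix, so block-level content may appear here</li>
</ul>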
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-notations-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-notations-1.xsd
new file mode 100644
index 00000000000..948a93e28bd
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-notations-1.xsd
@@ -0,0 +1,105 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      Notations module
+      This is the XML Schema module for data type notations for XHTML
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+  
+  <xs:annotation>
+    <xs:documentation>
+      Notations module
+      Defines the XHTML notations, many of these imported from 
+      other specifications and standards. When an existing FPI is
+      known, it is incorporated here.            
+    </xs:documentation>
+    <xs:documentation
+         source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstraction.html#s_common_attrtypes"/>        
+  </xs:annotation>
+
+  <!-- W3C XML 1.0 Recommendation -->
+  <xs:notation name="w3c-xml"
+    public="ISO 8879//NOTATION Extensible Markup Language (XML) 1.0//EN"/>
+
+  <!-- XML 1.0 CDATA -->
+  <xs:notation name="cdata" public="-//W3C//NOTATION XML 1.0: CDATA//EN"/>
+  
+  <!-- SGML Formal Public Identifiers -->
+  <xs:notation name="fpi"
+    public="ISO 8879:1986//NOTATION Formal Public Identifier//EN"/>
+
+  <!-- XHTML Notations ... -->
+  <!-- Length defined for cellpadding/cellspacing -->
+  <!-- nn for pixels or nn% for percentage length -->
+  <!-- a single character, as per section 2.2 of [XML] -->
+  <xs:notation name="character"
+    public="-//W3C//NOTATION XHTML Datatype: Character//EN"/>
+
+  <!-- a character encoding, as per [RFC2045] -->
+  <xs:notation name="charset"
+    public="-//W3C//NOTATION XHTML Datatype: Charset//EN"/>
+
+  <!-- a space separated list of character encodings, as per [RFC2045] -->
+  <xs:notation name="charsets"
+    public="-//W3C//NOTATION XHTML Datatype: Charsets//EN"/>
+
+  <!-- media type, as per [RFC2045] -->
+  <xs:notation name="contentType"
+    public="-//W3C//NOTATION XHTML Datatype: ContentType//EN"/>
+
+  <!-- comma-separated list of media types, as per [RFC2045] -->
+  <xs:notation name="contentTypes"
+    public="-//W3C//NOTATION XHTML Datatype: ContentTypes//EN"/>
+
+  <!-- date and time information. ISO date format -->
+  <xs:notation name="datetime"
+    public="-//W3C//NOTATION XHTML Datatype: Datetime//EN"/>
+
+  <!-- a language code, as per [RFC3066] -->
+  <xs:notation name="languageCode"
+    public="-//W3C//NOTATION XHTML Datatype: LanguageCode//EN"/>
+
+  <!-- nn for pixels or nn% for percentage length -->
+  <xs:notation name="length"
+    public="-//W3C//NOTATION XHTML Datatype: Length//EN"/>
+
+  <!-- space-separated list of link types -->
+  <xs:notation name="linkTypes"
+    public="-//W3C//NOTATION XHTML Datatype: LinkTypes//EN"/>
+
+  <!-- single or comma-separated list of media descriptors -->
+  <xs:notation name="mediaDesc"
+    public="-//W3C//NOTATION XHTML Datatype: MediaDesc//EN"/>
+
+  <!-- pixel, percentage, or relative -->
+  <xs:notation name="multiLength"
+    public="-//W3C//NOTATION XHTML Datatype: MultiLength//EN"/>
+
+  <!-- one or more digits (NUMBER) -->
+  <xs:notation name="number"
+    public="-//W3C//NOTATION XHTML Datatype: Number//EN"/>
+
+  <!-- integer representing length in pixels -->
+  <xs:notation name="pixels"
+    public="-//W3C//NOTATION XHTML Datatype: Pixels//EN"/>
+
+  <!-- script expression -->
+  <xs:notation name="script"
+    public="-//W3C//NOTATION XHTML Datatype: Script//EN"/>
+
+  <!-- textual content -->
+  <xs:notation name="text" public="-//W3C//NOTATION XHTML Datatype: Text//EN"/>
+
+  <!-- a Uniform Resource Identifier, see [URI] -->
+  <xs:notation name="uri" public="-//W3C//NOTATION XHTML Datatype: URI//EN"/>
+
+  <!-- a space-separated list of Uniform Resource Identifiers, see [URI] -->
+  <xs:notation name="uris" public="-//W3C//NOTATION XHTML Datatype: URIs//EN"/>
+
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-pres-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-pres-1.xsd
new file mode 100644
index 00000000000..2ce4cf208dc
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-pres-1.xsd
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      This is the XML Schema Presentation module for XHTML
+      This is a REQUIRED module.
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      Presentational Elements
+
+      This module defines elements and their attributes for
+      simple presentation-related markup.
+ 
+      Elements defined here:
+
+        * hr
+        * b, big, i, small, sub, sup, tt
+    </xs:documentation>
+    <xs:documentation 
+        source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_presentationmodule"/>
+  </xs:annotation>
+
+ <xs:include schemaLocation="xhtml-blkpres-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Block Presentational module
+        Elements defined here:
+ 
+         * hr
+      </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-inlpres-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Inline Presentational module
+        Elements defined here:
+
+          * b, big, i, small, sub, sup, tt
+    </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-table-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-table-1.xsd
new file mode 100644
index 00000000000..ba015e40e70
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-table-1.xsd
@@ -0,0 +1,323 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml" 
+           xmlns="http://www.w3.org/1999/xhtml" 
+           xmlns:xs="http://www.w3.org/2001/XMLSchema">
+    <xs:annotation>
+        <xs:documentation>
+          This is the XML Schema Tables module for XHTML
+          $Id$
+        </xs:documentation>
+        <xs:documentation source="xhtml-copyright-1.xsd"/>
+    </xs:annotation>
+    <xs:annotation>
+        <xs:documentation>
+          Tables
+      
+           * table, caption, thead, tfoot, tbody, colgroup, col, tr, th, td
+      
+          This module declares element types and attributes used to provide
+          table markup similar to HTML 4.0, including features that enable
+          better accessibility for non-visual user agents.
+        </xs:documentation>
+        <xs:documentation 
+           source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_tablemodule"/>
+    </xs:annotation>
+
+    <xs:attributeGroup name="frame.attrib">
+        <xs:attribute name="frame">
+            <xs:simpleType>
+                <xs:restriction base="xs:NMTOKEN">
+                    <xs:enumeration value="void"/>
+                    <xs:enumeration value="above"/>
+                    <xs:enumeration value="below"/>
+                    <xs:enumeration value="hsides"/>
+                    <xs:enumeration value="lhs"/>
+                    <xs:enumeration value="rhs"/>
+                    <xs:enumeration value="vsides"/>
+                    <xs:enumeration value="box"/>
+                    <xs:enumeration value="border"/>
+                </xs:restriction>
+            </xs:simpleType>
+        </xs:attribute>
+    </xs:attributeGroup>
+    
+    <xs:attributeGroup name="rules.attrib">
+        <xs:attribute name="rules">
+            <xs:simpleType>
+                <xs:restriction base="xs:NMTOKEN">
+                    <xs:enumeration value="none"/>
+                    <xs:enumeration value="groups"/>
+                    <xs:enumeration value="rows"/>
+                    <xs:enumeration value="cols"/>
+                    <xs:enumeration value="all"/>
+                </xs:restriction>
+            </xs:simpleType>
+        </xs:attribute>
+    </xs:attributeGroup>
+    
+    <xs:attributeGroup name="CellVAlign.attrib">
+        <xs:attribute name="valign">
+            <xs:simpleType>
+                <xs:restriction base="xs:NMTOKEN">
+                    <xs:enumeration value="top"/>
+                    <xs:enumeration value="middle"/>
+                    <xs:enumeration value="bottom"/>
+                    <xs:enumeration value="baseline"/>
+                </xs:restriction>
+            </xs:simpleType>
+        </xs:attribute>
+    </xs:attributeGroup>
+    
+    <xs:attributeGroup name="CellHAlign.attrib">
+        <xs:attribute name="align">
+            <xs:simpleType>
+                <xs:restriction base="xs:NMTOKEN">
+                    <xs:enumeration value="left"/>
+                    <xs:enumeration value="center"/>
+                    <xs:enumeration value="right"/>
+                    <xs:enumeration value="justify"/>
+                    <xs:enumeration value="char"/>
+                </xs:restriction>
+            </xs:simpleType>
+        </xs:attribute>
+        <xs:attribute name="char" type="Character"/>
+        <xs:attribute name="charoff" type="Length"/>
+    </xs:attributeGroup>
+    
+    <xs:attributeGroup name="scope.attrib">
+        <xs:attribute name="scope">
+            <xs:simpleType>
+                <xs:restriction base="xs:NMTOKEN">
+                    <xs:enumeration value="row"/>
+                    <xs:enumeration value="col"/>
+                    <xs:enumeration value="rowgroup"/>
+                    <xs:enumeration value="colgroup"/>
+                </xs:restriction>
+            </xs:simpleType>
+        </xs:attribute>
+    </xs:attributeGroup>
+    
+    <xs:attributeGroup name="td.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attribute name="abbr" type="Text"/>
+        <xs:attribute name="axis" type="CDATA"/>
+        <xs:attribute name="headers" type="xs:IDREFS"/>
+        <xs:attributeGroup ref="scope.attrib"/>
+        <xs:attribute name="rowspan" type="Number" default="1"/>
+        <xs:attribute name="colspan" type="Number" default="1"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="td.content">
+       <xs:sequence>
+          <xs:group ref="Flow.mix" minOccurs="0" maxOccurs="unbounded"/>
+       </xs:sequence>
+    </xs:group>    
+    
+    <xs:complexType name="td.type" mixed="true">
+        <xs:group ref="td.content"/>
+        <xs:attributeGroup ref="td.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="td" type="td.type"/>
+    
+    <xs:attributeGroup name="th.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attribute name="abbr" type="Text"/>
+        <xs:attribute name="axis" type="CDATA"/>
+        <xs:attribute name="headers" type="xs:IDREFS"/>
+        <xs:attributeGroup ref="scope.attrib"/>
+        <xs:attribute name="rowspan" type="Number" default="1"/>
+        <xs:attribute name="colspan" type="Number" default="1"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="th.content">
+       <xs:sequence>
+          <xs:group ref="Flow.mix" minOccurs="0" maxOccurs="unbounded"/>
+       </xs:sequence>
+    </xs:group>     
+    
+    <xs:complexType name="th.type" mixed="true">
+        <xs:group ref="th.content"/>
+        <xs:attributeGroup ref="th.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="th" type="th.type"/>
+    
+    <xs:attributeGroup name="tr.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="tr.content">
+      <xs:sequence>
+        <xs:choice maxOccurs="unbounded">
+            <xs:element ref="th"/>
+            <xs:element ref="td"/>
+        </xs:choice>
+      </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="tr.type">
+        <xs:group ref="tr.content"/>
+        <xs:attributeGroup ref="tr.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="tr" type="tr.type"/>
+    
+    <xs:attributeGroup name="col.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attribute name="span" type="Number" default="1"/>
+        <xs:attribute name="width" type="MultiLength"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+
+    <xs:group name="col.content">
+       <xs:sequence/>
+    </xs:group>             
+    
+    <xs:complexType name="col.type">
+        <xs:group ref="col.content"/>    
+        <xs:attributeGroup ref="col.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="col" type="col.type"/>
+    
+    <xs:attributeGroup name="colgroup.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attribute name="span" type="Number" default="1"/>
+        <xs:attribute name="width" type="MultiLength"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="colgroup.content">
+        <xs:sequence>
+            <xs:element ref="col" minOccurs="0" maxOccurs="unbounded"/>
+        </xs:sequence>
+    </xs:group>
+    <xs:complexType name="colgroup.type">
+        <xs:group ref="colgroup.content"/>
+        <xs:attributeGroup ref="colgroup.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="colgroup" type="colgroup.type"/>
+    
+    <xs:attributeGroup name="tbody.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="tbody.content">
+        <xs:sequence>
+            <xs:element ref="tr" maxOccurs="unbounded"/>
+        </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="tbody.type">
+        <xs:group ref="tbody.content"/>
+        <xs:attributeGroup ref="tbody.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="tbody" type="tbody.type"/>
+    
+    <xs:attributeGroup name="tfoot.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="tfoot.content">
+        <xs:sequence>
+            <xs:element ref="tr" maxOccurs="unbounded"/>
+        </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="tfoot.type">
+        <xs:group ref="tfoot.content"/>
+        <xs:attributeGroup ref="tfoot.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="tfoot" type="tfoot.type"/>
+    
+    <xs:attributeGroup name="thead.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attributeGroup ref="CellHAlign.attrib"/>
+        <xs:attributeGroup ref="CellVAlign.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="thead.content">
+        <xs:sequence>
+            <xs:element ref="tr" maxOccurs="unbounded"/>
+        </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="thead.type">
+        <xs:group ref="thead.content"/>
+        <xs:attributeGroup ref="thead.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="thead" type="thead.type"/>
+    
+    <xs:attributeGroup name="caption.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="caption.content">
+       <xs:sequence>
+         <xs:group ref="Inline.mix" minOccurs="0" maxOccurs="unbounded"/>
+       </xs:sequence>       
+    </xs:group>    
+
+    <xs:complexType name="caption.type" mixed="true">
+        <xs:group ref="caption.content"/>
+        <xs:attributeGroup ref="caption.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="caption" type="caption.type"/>
+    
+    <xs:attributeGroup name="table.attlist">
+        <xs:attributeGroup ref="Common.attrib"/>
+        <xs:attribute name="summary" type="Text"/>
+        <xs:attribute name="width" type="Length"/>
+        <xs:attribute name="border" type="Pixels"/>
+        <xs:attributeGroup ref="frame.attrib"/>
+        <xs:attributeGroup ref="rules.attrib"/>
+        <xs:attribute name="cellspacing" type="Length"/>
+        <xs:attribute name="cellpadding" type="Length"/>
+    </xs:attributeGroup>
+    
+    <xs:group name="table.content">
+        <xs:sequence>
+            <xs:element ref="caption" minOccurs="0"/>
+            <xs:choice>
+                <xs:element ref="col" minOccurs="0" maxOccurs="unbounded"/>
+                <xs:element ref="colgroup" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:choice>
+            <xs:choice>
+                <xs:sequence>
+                    <xs:element ref="thead" minOccurs="0"/>
+                    <xs:element ref="tfoot" minOccurs="0"/>
+                    <xs:element ref="tbody" maxOccurs="unbounded"/>
+                </xs:sequence>
+                <xs:choice>
+                    <xs:element ref="tr" maxOccurs="unbounded"/>
+                </xs:choice>
+            </xs:choice>
+        </xs:sequence>
+    </xs:group>
+    
+    <xs:complexType name="table.type">
+        <xs:group ref="table.content"/>
+        <xs:attributeGroup ref="table.attlist"/>
+    </xs:complexType>
+    
+    <xs:element name="table" type="table.type"/>
+
+</xs:schema>
\ No newline at end of file
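For reference (illustrative, not part of the patch), a minimal instance satisfying table.content — an optional caption, an optional col/colgroup choice, then either the thead/tfoot/tbody sequence or bare rows:

<table summary="per-variable counts" border="1" cellpadding="2">
  <caption>Response frequencies</caption>
  <thead>
    <tr><th scope="col">Value</th><th scope="col">Count</th></tr>
  </thead>
  <tbody>
    <tr><td>1</td><td align="right">245</td></tr>
    <tr><td>2</td><td align="right">131</td></tr>
  </tbody>
</table>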
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-text-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-text-1.xsd
new file mode 100644
index 00000000000..c0201aad05b
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/XHTML/xhtml-text-1.xsd
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
+           targetNamespace="http://www.w3.org/1999/xhtml"
+           xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      Textual Content
+      This is the XML Schema Text module for XHTML
+
+      The Text module includes declarations for all core
+      text container elements and their attributes.
+    
+        +  block phrasal
+        +  block structural
+        +  inline phrasal
+        +  inline structural
+      
+      $Id$
+    </xs:documentation>
+    <xs:documentation source="xhtml-copyright-1.xsd"/>
+    <xs:documentation 
+        source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_textmodule"/>          
+  </xs:annotation>
+
+  <xs:include schemaLocation="xhtml-blkphras-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Block Phrasal module
+        Elements defined here:
+
+          * address, blockquote, pre, h1, h2, h3, h4, h5, h6
+    </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-blkstruct-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Block Structural module 
+        Elements defined here:
+
+          * div, p
+    </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-inlphras-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Inline Phrasal module
+        Elements defined here:
+
+          * abbr, acronym, cite, code, dfn, em, kbd, q, samp, strong, var
+    </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-inlstruct-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Inline Structural module 
+        Elements defined here:
+
+          * br,span
+    </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+
+</xs:schema>
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/dc.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/dc.xsd
new file mode 100644
index 00000000000..974c75ccc1d
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/dc.xsd
@@ -0,0 +1,123 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns="http://purl.org/dc/elements/1.1/" targetNamespace="http://purl.org/dc/elements/1.1/" elementFormDefault="qualified" attributeFormDefault="unqualified">
+
+  <xs:annotation>
+    <xs:documentation xml:lang="en">
+      DCMES 1.1 XML Schema
+      XML Schema for http://purl.org/dc/elements/1.1/ namespace
+
+      Created 2003-04-02
+
+      Created by 
+
+      Tim Cole (t-cole3@uiuc.edu)
+      Tom Habing (thabing@uiuc.edu)
+      Jane Hunter (jane@dstc.edu.au)
+      Pete Johnston (p.johnston@ukoln.ac.uk),
+      Carl Lagoze (lagoze@cs.cornell.edu)
+
+      This schema declares XML elements for the 15 DC elements from the
+      http://purl.org/dc/elements/1.1/ namespace.
+
+      It defines a complexType SimpleLiteral which permits mixed content 
+      and makes the xml:lang attribute available. It disallows child elements by
+      use of minOccurs/maxOccurs.
+
+      However, this complexType does permit the derivation of other complexTypes
+      which would permit child elements.
+
+      All elements are declared as substitutable for the abstract element any, 
+      which means that the default type for all elements is dc:SimpleLiteral.
+
+    </xs:documentation>
+
+  </xs:annotation>
+
+
+  <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="xml.xsd">
+  </xs:import>
+
+  <xs:complexType name="SimpleLiteral" mixed="true">
+        <xs:annotation>
+        <xs:documentation xml:lang="en">
+            This is the default type for all of the DC elements.
+            It permits text content only with optional
+            xml:lang attribute.
+            Text is allowed because mixed="true", but sub-elements
+            are disallowed because minOccurs="0" and maxOccurs="0" 
+            are on the xs:any tag.
+
+    	    This complexType allows for restriction or extension permitting
+            child elements.
+    	</xs:documentation>
+		<xs:documentation xml:lang="en">This structure has been altered from 
+			its original declaration of:
+			<xs:complexType name="SimpleLiteral">
+				<xs:complexContent mixed="true">
+					<xs:restriction base="xs:anyType">
+						<xs:sequence>
+							<xs:any processContents="lax" minOccurs="0" maxOccurs="0"/>
+						</xs:sequence>
+						<xs:attribute ref="xml:lang" use="optional"/>
+					</xs:restriction>
+				</xs:complexContent>
+			</xs:complexType>		
+			Such a restriction from the xs:anyType is unnecessary, as this is 
+			implicit in all types. Further, the xs:any element with a minOccurs="0" 
+			causes issues with Apache Xml Beans. This declaration allows for an 
+			equivalent content model while still allowing extensions from this type
+			to declare elements to allow for true mixed content.
+		</xs:documentation>
+  	</xs:annotation>
+     <xs:attribute ref="xml:lang" use="optional"/>
+  </xs:complexType>
+
+  <xs:element name="any" type="SimpleLiteral" abstract="true"/>
+
+  <xs:element name="title" substitutionGroup="any"/>
+  <xs:element name="creator" substitutionGroup="any"/>
+  <xs:element name="subject" substitutionGroup="any"/>
+  <xs:element name="description" substitutionGroup="any"/>
+  <xs:element name="publisher" substitutionGroup="any"/>
+  <xs:element name="contributor" substitutionGroup="any"/>
+  <xs:element name="date" substitutionGroup="any"/>
+  <xs:element name="type" substitutionGroup="any"/>
+  <xs:element name="format" substitutionGroup="any"/>
+  <xs:element name="identifier" substitutionGroup="any"/>
+  <xs:element name="source" substitutionGroup="any"/>
+  <xs:element name="language" substitutionGroup="any"/>
+  <xs:element name="relation" substitutionGroup="any"/>
+  <xs:element name="coverage" substitutionGroup="any"/>
+  <xs:element name="rights" substitutionGroup="any"/>
+
+  <xs:group name="elementsGroup">
+  	<xs:annotation>
+    	<xs:documentation xml:lang="en">
+    	    This group is included as a convenience for schema authors
+            who need to refer to all the elements in the 
+            http://purl.org/dc/elements/1.1/ namespace.
+    	</xs:documentation>
+  	</xs:annotation>
+
+  <xs:sequence>
+    <xs:choice minOccurs="0" maxOccurs="unbounded">
+      <xs:element ref="any"/>
+    </xs:choice>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:complexType name="elementContainer">
+  	<xs:annotation>
+    	<xs:documentation xml:lang="en">
+    		This complexType is included as a convenience for schema authors who need to define a root
+    		or container element for all of the DC elements.
+    	</xs:documentation>
+  	</xs:annotation>
+
+    <xs:choice>
+      <xs:group ref="elementsGroup"/>
+    </xs:choice>
+  </xs:complexType>
+
+
+</xs:schema>
\ No newline at end of file
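As the annotation notes, every DC element defaults to SimpleLiteral: text-only content with an optional xml:lang attribute. A hedged usage sketch (not part of the patch; the dc: prefix is the conventional binding for http://purl.org/dc/elements/1.1/ and the values are invented):

<dc:title xml:lang="en">Survey of Household Income</dc:title>
<dc:creator>Example Research Group</dc:creator>
<dc:date>2003-04-02</dc:date>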
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/dcmitype.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/dcmitype.xsd
new file mode 100644
index 00000000000..2433cf0bded
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/dcmitype.xsd
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns="http://purl.org/dc/dcmitype/" targetNamespace="http://purl.org/dc/dcmitype/" elementFormDefault="qualified" attributeFormDefault="unqualified">
+
+  <xs:annotation>
+    <xs:documentation xml:lang="en">
+      DCMI Type Vocabulary XML Schema
+      XML Schema for http://purl.org/dc/dcmitype/ namespace
+
+      Created 2003-04-02
+
+      Created by 
+
+      Tim Cole (t-cole3@uiuc.edu)
+      Tom Habing (thabing@uiuc.edu)
+      Jane Hunter (jane@dstc.edu.au)
+      Pete Johnston (p.johnston@ukoln.ac.uk),
+      Carl Lagoze (lagoze@cs.cornell.edu)
+
+      This schema defines a simpleType which enumerates
+      the allowable values for the DCMI Type Vocabulary.
+    </xs:documentation>
+
+ 
+  </xs:annotation>
+
+
+  <xs:simpleType name="DCMIType">
+     <xs:union>
+        <xs:simpleType>
+           <xs:restriction base="xs:Name">
+		<xs:enumeration value="Collection"/>
+		<xs:enumeration value="Dataset"/>
+		<xs:enumeration value="Event"/>
+		<xs:enumeration value="Image"/>
+		<xs:enumeration value="InteractiveResource"/>
+		<xs:enumeration value="Service"/>
+		<xs:enumeration value="Software"/>
+		<xs:enumeration value="Sound"/>
+		<xs:enumeration value="Text"/>
+		<xs:enumeration value="PhysicalObject"/>
+            </xs:restriction>
+        </xs:simpleType> 
+     </xs:union>
+  </xs:simpleType>
+
+</xs:schema>
\ No newline at end of file
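Illustrative use of the DCMIType vocabulary (not part of the patch): dcterms.xsd below wraps this simpleType in a complexType derived from dc:SimpleLiteral, so instance documents typically select it via xsi:type, for example:

<dc:type xsi:type="dcterms:DCMIType">Dataset</dc:type>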
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/dcterms.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/dcterms.xsd
new file mode 100644
index 00000000000..d306002b5a7
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/dcterms.xsd
@@ -0,0 +1,322 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcmitype="http://purl.org/dc/dcmitype/" targetNamespace="http://purl.org/dc/terms/" xmlns="http://purl.org/dc/terms/" elementFormDefault="qualified" attributeFormDefault="unqualified">
+
+  <xs:annotation>
+    <xs:documentation xml:lang="en">
+      DCterms XML Schema
+      XML Schema for http://purl.org/dc/terms/ namespace
+
+      Created 2003-04-02
+
+      Created by 
+
+      Tim Cole (t-cole3@uiuc.edu)
+      Tom Habing (thabing@uiuc.edu)
+      Jane Hunter (jane@dstc.edu.au)
+      Pete Johnston (p.johnston@ukoln.ac.uk),
+      Carl Lagoze (lagoze@cs.cornell.edu)
+
+      This schema declares XML elements for the DC elements and
+      DC element refinements from the http://purl.org/dc/terms/ namespace.
+      
+      It reuses the complexType dc:SimpleLiteral, imported from the dc.xsd
+      schema, which permits simple element content, and makes the xml:lang
+      attribute available.
+
+      This complexType permits the derivation of other complexTypes
+      which would permit child elements.
+
+      DC elements are declared as substitutable for the abstract element dc:any, and 
+      DC element refinements are defined as substitutable for the base elements 
+      which they refine.
+
+      This means that the default type for all XML elements (i.e. all DC elements and 
+      element refinements) is dc:SimpleLiteral.
+
+      Encoding schemes are defined as complexTypes which are restrictions
+      of the dc:SimpleLiteral complexType. These complexTypes restrict 
+      values to an appropriate syntax or format using data typing,
+      regular expressions, or enumerated lists.
+  
+      In order to specify one of these encodings an xsi:type attribute must 
+      be used in the instance document.
+
+      Also, note that one shortcoming of this approach is that any type can be 
+      applied to any of the elements or refinements.  There is no convenient way
+      to restrict types to specific elements using this approach.
+
+    </xs:documentation>
+
+  </xs:annotation>
+
+
+  <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="xml.xsd">
+  </xs:import>
+
+   <xs:import namespace="http://purl.org/dc/elements/1.1/" schemaLocation="dc.xsd"/>
+
+   <xs:import namespace="http://purl.org/dc/dcmitype/" schemaLocation="dcmitype.xsd"/>
+
+   <xs:element name="alternative" substitutionGroup="dc:title"/>
+
+   <xs:element name="tableOfContents" substitutionGroup="dc:description"/>
+   <xs:element name="abstract" substitutionGroup="dc:description"/>
+
+   <xs:element name="created" substitutionGroup="dc:date"/>
+   <xs:element name="valid" substitutionGroup="dc:date"/>
+   <xs:element name="available" substitutionGroup="dc:date"/>
+   <xs:element name="issued" substitutionGroup="dc:date"/>
+   <xs:element name="modified" substitutionGroup="dc:date"/>
+   <xs:element name="dateAccepted" substitutionGroup="dc:date"/>
+   <xs:element name="dateCopyrighted" substitutionGroup="dc:date"/>
+   <xs:element name="dateSubmitted" substitutionGroup="dc:date"/>
+
+   <xs:element name="extent" substitutionGroup="dc:format"/>
+   <xs:element name="medium" substitutionGroup="dc:format"/>
+
+   <xs:element name="isVersionOf" substitutionGroup="dc:relation"/>
+   <xs:element name="hasVersion" substitutionGroup="dc:relation"/>
+   <xs:element name="isReplacedBy" substitutionGroup="dc:relation"/>
+   <xs:element name="replaces" substitutionGroup="dc:relation"/>
+   <xs:element name="isRequiredBy" substitutionGroup="dc:relation"/>
+   <xs:element name="requires" substitutionGroup="dc:relation"/>
+   <xs:element name="isPartOf" substitutionGroup="dc:relation"/>
+   <xs:element name="hasPart" substitutionGroup="dc:relation"/>
+   <xs:element name="isReferencedBy" substitutionGroup="dc:relation"/>
+   <xs:element name="references" substitutionGroup="dc:relation"/>
+   <xs:element name="isFormatOf" substitutionGroup="dc:relation"/>
+   <xs:element name="hasFormat" substitutionGroup="dc:relation"/>
+   <xs:element name="conformsTo" substitutionGroup="dc:relation"/>
+
+   <xs:element name="spatial" substitutionGroup="dc:coverage"/>
+   <xs:element name="temporal" substitutionGroup="dc:coverage"/>
+
+   <xs:element name="audience" substitutionGroup="dc:any"/>
+
+   <xs:element name="mediator" substitutionGroup="audience"/>
+   <xs:element name="educationLevel" substitutionGroup="audience"/>
+
+   <xs:element name="accessRights" substitutionGroup="dc:rights"/>
+
+   <xs:element name="bibliographicCitation" substitutionGroup="dc:identifier"/>
+
+  <xs:complexType name="LCSH">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="MESH">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="DDC">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="LCC">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="UDC">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="Period">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="W3CDTF">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+           <xs:union memberTypes="xs:gYear xs:gYearMonth xs:date xs:dateTime"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType> 
+
+  <xs:complexType name="DCMIType">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="dcmitype:DCMIType"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="IMT">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="URI">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:anyURI"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType> 
+
+  <xs:complexType name="ISO639-2">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="RFC1766">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:language"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="RFC3066">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:language"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="Point">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="ISO3166">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="Box">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:complexType name="TGN">
+   <xs:simpleContent>
+    <xs:restriction base="dc:SimpleLiteral">
+        <xs:simpleType>
+          <xs:restriction base="xs:string"/>
+        </xs:simpleType>
+        <xs:attribute ref="xml:lang" use="prohibited"/>
+    </xs:restriction>
+   </xs:simpleContent>
+  </xs:complexType>
+
+  <xs:group name="elementsAndRefinementsGroup">
+  	<xs:annotation>
+    	<xs:documentation xml:lang="en">
+    		This group is included as a convenience for schema authors
+            who need to refer to all the DC elements and element refinements 
+            in the http://purl.org/dc/elements/1.1/ and 
+            http://purl.org/dc/terms namespaces. 
+            N.B. Refinements available via substitution groups.
+    	</xs:documentation>
+  	</xs:annotation>
+
+  <xs:sequence>
+    <xs:choice minOccurs="0" maxOccurs="unbounded">
+	<xs:element ref="dc:any"/>
+    </xs:choice>
+  </xs:sequence>
+  </xs:group>	
+
+  <xs:complexType name="elementOrRefinementContainer">
+  	<xs:annotation>
+    	<xs:documentation xml:lang="en">
+    		This is included as a convenience for schema authors who need to define a root
+    		or container element for all of the DC elements and element refinements.
+    	</xs:documentation>
+  	</xs:annotation>
+
+    <xs:choice>
+      <xs:group ref="elementsAndRefinementsGroup"/>
+    </xs:choice>
+  </xs:complexType>
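+
+  <!--
+    Illustration only (a hedged sketch): a schema author who needs a single wrapper element
+    for DC elements and refinements, as described in the annotation above, might declare a
+    root element of this container type in an importing schema, e.g.:
+
+      <xs:element name="metadata" type="dcterms:elementOrRefinementContainer"/>
+
+    The element name "metadata" and the "dcterms" prefix are assumptions made for the example.
+  -->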
+
+
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11-model-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11-model-1.xsd
new file mode 100644
index 00000000000..528ce98c7c3
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11-model-1.xsd
@@ -0,0 +1,490 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2009 DDI Alliance, DDI 3.1, 2009-10-18
+
+This file is part of DDI 3.1 XML Schema.
+
+DDI 3.1 XML Schema is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation, either version 3 of the License, or (at your
+option) any later version.
+
+DDI 3.1 XML Schema is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+for more details.
+
+You should have received a copy of the GNU Lesser General Public License along
+with DDI 3.1 XML Schema. If not, see <http://www.gnu.org/licenses/>.
+
+-->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" targetNamespace="http://www.w3.org/1999/xhtml" xmlns="http://www.w3.org/1999/xhtml">
+
+  <xs:annotation>
+    <xs:documentation>
+      This is the XML Schema module of common content models for XHTML11
+      $Id: ddi-xhtml11-model-1.xsd,v 1.1 2007/02/08 16:03:11 jgager Exp $
+    </xs:documentation>
+    <xs:documentation source="XHTML/xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+      XHTML Document Model
+
+      This module describes the groupings of elements/attributes that make up
+      common content models for XHTML elements.
+
+      XHTML has the following basic content models:
+
+          Inline.mix;          character-level elements
+          Block.mix;           block-like elements, e.g., paragraphs and lists
+          Flow.mix;            any block or inline elements
+          HeadOpts.mix;        Head Elements
+          InlinePre.mix;       Special class for pre content model
+          InlineNoAnchor.mix;  Content model for Anchor
+
+      Any groups declared in this module may be used
+      to create element content models, but the above are
+      considered 'global' (insofar as that term applies here).
+
+      XHTML has the following Attribute Groups
+           Core.extra.attrib
+           I18n.extra.attrib
+           Common.extra
+
+      The above attribute Groups are considered Global
+
+    </xs:documentation>
+  </xs:annotation>
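+
+  <!--
+    Illustration only (a hedged sketch): an element content model defined in terms of the
+    'global' mixes listed above would typically reference one of the groups, for example:
+
+      <xs:complexType name="div.content" mixed="true">
+        <xs:group ref="Flow.mix" minOccurs="0" maxOccurs="unbounded"/>
+      </xs:complexType>
+
+    The type name "div.content" is an assumption made for this example.
+  -->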
+
+  <xs:attributeGroup name="I18n.extra.attrib">
+    <xs:annotation>
+        <xs:documentation>
+           Extended I18n attribute
+        </xs:documentation>
+    </xs:annotation>
+    <xs:attributeGroup ref="dir.attrib">
+        <xs:annotation>
+          <xs:documentation>
+            "dir" Attribute from Bi Directional Text (bdo) Module
+          </xs:documentation>
+        </xs:annotation>
+    </xs:attributeGroup>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Common.extra">
+    <xs:annotation>
+       <xs:documentation>
+           Extended Common Attributes
+       </xs:documentation>
+    </xs:annotation>
+    <xs:attributeGroup ref="style.attrib">
+       <xs:annotation>
+         <xs:documentation>
+           "style" attribute from Inline Style Module
+         </xs:documentation>
+       </xs:annotation>
+     </xs:attributeGroup>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Core.extra.attrib">
+      <xs:annotation>
+        <xs:documentation>
+           Extended Core Attributes
+        </xs:documentation>
+      </xs:annotation>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Global.core.extra.attrib">
+      <xs:annotation>
+        <xs:documentation>
+           Extended Global Core Attributes
+        </xs:documentation>
+      </xs:annotation>
+  </xs:attributeGroup>
+
+  <xs:attributeGroup name="Global.I18n.extra.attrib">
+      <xs:annotation>
+        <xs:documentation>
+           Extended Global I18n attributes
+        </xs:documentation>
+      </xs:annotation>
+  </xs:attributeGroup>
+
+   <xs:attributeGroup name="Global.Common.extra">
+      <xs:annotation>
+        <xs:documentation>
+           Extended Global Common Attributes
+        </xs:documentation>
+      </xs:annotation>
+   </xs:attributeGroup>
+<!--
+  <xs:group name="HeadOpts.mix">
+    <xs:choice>
+      <xs:element ref="script"/>
+      <xs:element ref="style"/>
+      <xs:element ref="meta"/>
+      <xs:element ref="link"/>
+      <xs:element ref="object"/>
+    </xs:choice>
+  </xs:group>
+-->
+  <!--
+   ins and del are used to denote editing changes
+  -->
+<!--
+  <xs:group name="Edit.class">
+    <xs:choice>
+      <xs:element ref="ins"/>
+      <xs:element ref="del"/>
+    </xs:choice>
+  </xs:group>
+-->
+  <!--
+   script and noscript are used to contain scripts
+   and alternative content
+  -->
+<!--
+  <xs:group name="Script.class">
+    <xs:choice>
+      <xs:element ref="script"/>
+      <xs:element ref="noscript"/>
+    </xs:choice>
+  </xs:group>
+-->
+  <xs:group name="Misc.extra">
+    <xs:choice/>
+  </xs:group>
+
+  <!--
+   These elements are neither block nor inline, and can
+   essentially be used anywhere in the document body.
+  -->
+  <xs:group name="Misc.class">
+    <xs:choice/>
+<!--      <xs:group ref="Edit.class"/>
+      <xs:group ref="Script.class"/>
+      <xs:group ref="Misc.extra"/>
+    </xs:choice>
+ -->
+  </xs:group>
+
+  <!-- Inline Elements -->
+  <xs:group name="InlStruct.class">
+    <xs:choice>
+      <xs:element ref="br"/>
+      <xs:element ref="span"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="InlPhras.class">
+    <xs:choice>
+      <xs:element ref="em"/>
+      <xs:element ref="strong"/>
+      <xs:element ref="dfn"/>
+      <xs:element ref="code"/>
+      <xs:element ref="samp"/>
+      <xs:element ref="kbd"/>
+      <xs:element ref="var"/>
+      <xs:element ref="cite"/>
+      <xs:element ref="abbr"/>
+      <xs:element ref="acronym"/>
+      <xs:element ref="q"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="InlPres.class">
+    <xs:choice>
+      <xs:element ref="tt"/>
+      <xs:element ref="i"/>
+      <xs:element ref="b"/>
+      <xs:element ref="big"/>
+      <xs:element ref="small"/>
+      <xs:element ref="sub"/>
+      <xs:element ref="sup"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="I18n.class">
+    <xs:sequence>
+      <xs:element ref="bdo"/>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:group name="Anchor.class">
+    <xs:sequence>
+      <xs:element ref="a"/>
+    </xs:sequence>
+  </xs:group>
+<!--
+  <xs:group name="InlSpecial.class">
+    <xs:choice>
+      <xs:element ref="img"/>
+      <xs:element ref="map"/>
+      <xs:element ref="object"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="InlForm.class">
+    <xs:choice>
+      <xs:element ref="input"/>
+      <xs:element ref="select"/>
+      <xs:element ref="textarea"/>
+      <xs:element ref="label"/>
+      <xs:element ref="button"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="Inline.extra">
+    <xs:choice/>
+  </xs:group>
+
+  <xs:group name="Ruby.class">
+    <xs:sequence>
+      <xs:element ref="ruby"/>
+    </xs:sequence>
+  </xs:group>
+-->
+  <!--
+   Inline.class includes all inline elements,
+   used as a component in mixes
+  -->
+
+  <xs:group name="Inline.class">
+    <xs:choice>
+      <xs:group ref="InlStruct.class"/>
+      <xs:group ref="InlPhras.class"/>
+      <xs:group ref="InlPres.class"/>
+      <xs:group ref="I18n.class"/>
+      <xs:group ref="Anchor.class"/>
+<!--      <xs:group ref="InlSpecial.class"/>
+      <xs:group ref="InlForm.class"/>
+      <xs:group ref="Ruby.class"/>
+      <xs:group ref="Inline.extra"/> -->
+    </xs:choice>
+  </xs:group>
+
+  <!--
+     InlNoRuby.class includes all inline elements
+     except ruby
+  -->
+  <xs:group name="InlNoRuby.class">
+    <xs:choice>
+      <xs:group ref="InlStruct.class"/>
+      <xs:group ref="InlPhras.class"/>
+      <xs:group ref="InlPres.class"/>
+      <xs:group ref="I18n.class"/>
+      <xs:group ref="Anchor.class"/>
+<!--      <xs:group ref="InlSpecial.class"/> -->
+<!--      <xs:group ref="InlForm.class"/> -->
+<!--      <xs:group ref="Inline.extra"/> -->
+    </xs:choice>
+  </xs:group>
+
+
+  <!--
+    InlinePre.mix
+    Used as a component in pre model
+  -->
+  <xs:group name="InlinePre.mix">
+    <xs:choice>
+      <xs:group ref="InlStruct.class"/>
+      <xs:group ref="InlPhras.class"/>
+      <xs:element ref="tt"/>
+      <xs:element ref="i"/>
+      <xs:element ref="b"/>
+      <xs:group ref="I18n.class"/>
+      <xs:group ref="Anchor.class"/>
+<!--      <xs:element ref="script"/>
+      <xs:element ref="map"/>
+      <xs:group ref="Inline.extra"/> -->
+    </xs:choice>
+  </xs:group>
+
+  <!--
+    InlNoAnchor.class includes all non-anchor inlines,
+    used as a component in mixes
+  -->
+  <xs:group name="InlNoAnchor.class">
+    <xs:choice>
+      <xs:group ref="InlStruct.class"/>
+      <xs:group ref="InlPhras.class"/>
+      <xs:group ref="InlPres.class"/>
+      <xs:group ref="I18n.class"/>
+<!--      <xs:group ref="InlSpecial.class"/>
+      <xs:group ref="InlForm.class"/>
+      <xs:group ref="Ruby.class"/>
+      <xs:group ref="Inline.extra"/> -->
+    </xs:choice>
+  </xs:group>
+
+  <!--
+    InlNoAnchor.mix includes all non-anchor inlines
+  -->
+  <xs:group name="InlNoAnchor.mix">
+    <xs:choice>
+      <xs:group ref="InlNoAnchor.class"/>
+    </xs:choice>
+  </xs:group>
+
+  <!--
+    Inline.mix includes all inline elements, including Misc.class
+  -->
+  <xs:group name="Inline.mix">
+    <xs:choice>
+      <xs:group ref="Inline.class"/>
+    </xs:choice>
+  </xs:group>
+
+  <!--
+   InlNoRuby.mix includes all of inline.mix elements
+   except ruby
+  -->
+<!--
+  <xs:group name="InlNoRuby.mix">
+    <xs:choice>
+      <xs:group ref="InlNoRuby.class"/>
+      <xs:group ref="Misc.class"/>
+    </xs:choice>
+  </xs:group>
+-->
+
+  <!--
+    In the HTML 4 DTD, heading and list elements were included
+    in the block group. The Heading.class and
+    List.class groups must now be included explicitly
+    on element declarations where desired.
+  -->
+  <xs:group name="Heading.class">
+    <xs:choice>
+      <xs:element ref="h1"/>
+      <xs:element ref="h2"/>
+      <xs:element ref="h3"/>
+      <xs:element ref="h4"/>
+      <xs:element ref="h5"/>
+      <xs:element ref="h6"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="List.class">
+    <xs:choice>
+      <xs:element ref="ul"/>
+      <xs:element ref="ol"/>
+      <xs:element ref="dl"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="Table.class">
+    <xs:choice>
+      <xs:element ref="table"/>
+    </xs:choice>
+  </xs:group>
+<!--
+  <xs:group name="Form.class">
+    <xs:choice>
+      <xs:element ref="form"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="Fieldset.class">
+    <xs:choice>
+      <xs:element ref="fieldset"/>
+    </xs:choice>
+  </xs:group>
+-->
+  <xs:group name="BlkStruct.class">
+    <xs:choice>
+      <xs:element ref="p"/>
+      <xs:element ref="div"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="BlkPhras.class">
+    <xs:choice>
+      <xs:element ref="pre"/>
+      <xs:element ref="blockquote"/>
+      <xs:element ref="address"/>
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="BlkPres.class">
+    <xs:sequence>
+      <xs:element ref="hr"/>
+    </xs:sequence>
+  </xs:group>
+
+  <xs:group name="BlkSpecial.class">
+    <xs:choice>
+      <xs:group ref="Table.class"/>
+<!--      <xs:group ref="Form.class"/>
+      <xs:group ref="Fieldset.class"/> -->
+    </xs:choice>
+  </xs:group>
+
+  <xs:group name="Block.extra">
+    <xs:choice/>
+  </xs:group>
+
+  <!--
+    Block.class includes all block elements,
+    used as a component in mixes
+  -->
+  <xs:group name="Block.class">
+    <xs:choice>
+      <xs:group ref="BlkStruct.class"/>
+      <xs:group ref="BlkPhras.class"/>
+      <xs:group ref="BlkPres.class"/>
+      <xs:group ref="BlkSpecial.class"/>
+    </xs:choice>
+  </xs:group>
+
+  <!--
+   Block.mix includes all block elements plus %Misc.class;
+  -->
+  <xs:group name="Block.mix">
+    <xs:choice>
+      <xs:group ref="Heading.class"/>
+      <xs:group ref="List.class"/>
+      <xs:group ref="Block.class"/>
+    </xs:choice>
+  </xs:group>
+
+  <!--
+    All Content Elements
+    Flow.mix includes all text content, block and inline
+    Note that the "any" element included here allows us
+    to add data from any other namespace, a necessity
+    for compound document creation.
+    Note however that it is not possible to add
+    to any head level element without further
+    modification. To add RDF metadata to the head
+    of a document, modify the structure module.
+  -->
+  <xs:group name="Flow.mix">
+    <xs:choice>
+      <xs:group ref="Heading.class"/>
+      <xs:group ref="List.class"/>
+      <xs:group ref="Block.class"/>
+      <xs:group ref="Inline.class"/>
+    </xs:choice>
+  </xs:group>
+
+
+  <!--
+    BlkNoForm.mix includes all non-form block elements,
+       plus Misc.class
+  -->
+  <xs:group name="BlkNoForm.mix">
+    <xs:choice>
+      <xs:group ref="Heading.class"/>
+      <xs:group ref="List.class"/>
+      <xs:group ref="BlkStruct.class"/>
+      <xs:group ref="BlkPhras.class"/>
+      <xs:group ref="BlkPres.class"/>
+      <xs:group ref="Table.class"/>
+    </xs:choice>
+  </xs:group>
+
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11-modules-1.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11-modules-1.xsd
new file mode 100644
index 00000000000..fcf2a1ae709
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11-modules-1.xsd
@@ -0,0 +1,569 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2009 DDI Alliance, DDI 3.1, 2009-10-18
+
+This file is part of DDI 3.1 XML Schema.
+
+DDI 3.1 XML Schema is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation, either version 3 of the License, or (at your
+option) any later version.
+
+DDI 3.1 XML Schema is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+for more details.
+
+You should have received a copy of the GNU Lesser General Public License along
+with DDI 3.1 XML Schema. If not, see <http://www.gnu.org/licenses/>.
+
+-->
+<xs:schema targetNamespace="http://www.w3.org/1999/xhtml" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns="http://www.w3.org/1999/xhtml" blockDefault="#all">
+
+  <xs:annotation>
+    <xs:documentation>
+      This schema includes all modules for XHTML1.1 Document Type.
+      $Id: ddi-xhtml11-modules-1.xsd,v 1.1 2007/02/08 16:03:11 jgager Exp $
+    </xs:documentation>
+    <xs:documentation source="XHTML/xhtml-copyright-1.xsd"/>
+  </xs:annotation>
+
+  <xs:annotation>
+    <xs:documentation>
+     This schema includes all modules (and redefinitions)
+     for XHTML1.1 Document Type.
+     XHTML1.1 Document Type includes the following Modules
+
+       XHTML Core modules (Required for XHTML Family Conformance)
+            +  text
+            +  hypertext
+            +  lists
+            +  structure
+
+       Other XHTML modules
+            +  Edit
+            +  Bdo
+            +  Presentational
+            +  Link
+            +  Meta
+            +  Base
+            +  Scripting
+            +  Style
+            +  Image
+            +  Applet
+            +  Object
+            +  Param (Applet/Object modules require Param Module)
+            +  Tables
+            +  Forms
+            +  Client side image maps
+            +  Server side image maps
+
+    </xs:documentation>
+  </xs:annotation>
+
+  <xs:include schemaLocation="XHTML/xhtml-framework-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Schema Framework Component Modules:
+            +  notations
+            +  datatypes
+            +  common attributes
+            +  character entities
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_commonatts"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="XHTML/xhtml-text-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Text module
+
+        The Text module includes declarations for all core
+        text container elements and their attributes.
+
+            +  block phrasal
+            +  block structural
+            +  inline phrasal
+            +  inline structural
+
+        Elements defined here:
+          * address, blockquote, pre, h1, h2, h3, h4, h5, h6
+          * div, p
+          * abbr, acronym, cite, code, dfn, em, kbd, q, samp, strong, var
+          * br, span
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_textmodule"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="XHTML/xhtml-hypertext-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+         Hypertext module
+
+         Elements defined here:
+          * a
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_hypertextmodule"/>
+    </xs:annotation>
+  </xs:include>
+
+<!--
+  <xs:redefine schemaLocation="XHTML/xhtml-hypertext-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+         Hypertext module
+
+         Elements defined here:
+          * a
+      </xs:documentation>
+      <xs:documentation
+        source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_hypertextmodule"/>
+    </xs:annotation>
+    <xs:attributeGroup name="a.attlist">
+       <xs:attributeGroup ref="a.attlist" />
+       <xs:attributeGroup ref="a.csim.attlist">
+          <xs:annotation>
+            <xs:documentation>
+              Redefinition by Client Side Image Map Module
+            </xs:documentation>
+          </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="a.events.attlist">
+          <xs:annotation>
+            <xs:documentation>
+              Redefinition by XHTML Event Attribute Module
+            </xs:documentation>
+          </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+  </xs:redefine>
+-->
+
+  <xs:include schemaLocation="XHTML/xhtml-list-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Lists module
+
+        Elements defined here:
+          * dt, dd, dl, ol, ul, li
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/2001/REC-xhtml-modularization-20010410/abstract_modules.html#s_listmodule"/>
+    </xs:annotation>
+  </xs:include>
+<!--
+ <xs:include schemaLocation="xhtml-edit-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Edit module
+
+        Elements defined here:
+          * ins, del
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_editmodule"/>
+    </xs:annotation>
+  </xs:include>
+-->
+  <xs:include schemaLocation="XHTML/xhtml-bdo-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Bidirectional element module
+
+        Elements defined here:
+          * bdo
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_bdomodule"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="XHTML/xhtml-pres-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Presentational module
+
+         Elements defined here:
+           * hr, b, big, i, small, sub, sup, tt
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_presentationmodule"/>
+    </xs:annotation>
+  </xs:include>
+<!--
+  <xs:include schemaLocation="XHTML/xhtml-link-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Link module
+
+        Elements defined here:
+          * link
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_linkmodule"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-meta-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Meta module
+
+        Elements defined here:
+        * meta
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_metamodule"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-base-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Base module
+
+        Elements defined here:
+          * base
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_basemodule"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-script-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Scripting module
+
+        Elements defined here:
+          * script, noscript
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_scriptmodule"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-style-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Style module
+
+        Elements defined here:
+          * style
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_stylemodule"/>
+    </xs:annotation>
+  </xs:include>
+-->
+  <xs:include schemaLocation="XHTML/xhtml-inlstyle-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Style attribute module
+
+        Attribute defined here:
+          * style
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_styleattributemodule"/>
+    </xs:annotation>
+  </xs:include>
+<!--
+  <xs:redefine schemaLocation="xhtml-image-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Image module
+
+        Elements defined here:
+          * img
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_imagemodule"/>
+    </xs:annotation>
+    <xs:attributeGroup name="img.attlist">
+       <xs:attributeGroup ref="img.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Image Attributes (in Image Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="img.csim.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Redefinition by Client Side Image Map Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="img.ssimap.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Redefinition by Server Side Image Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+  </xs:redefine>
+
+  <xs:redefine schemaLocation="XHTML/xhtml-csismap-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Client-side image maps module
+
+        Elements defined here:
+          * area, map
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_imapmodule"/>
+    </xs:annotation>
+    <xs:attributeGroup name="area.attlist">
+       <xs:attributeGroup ref="area.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Area Attributes (in CSI Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="area.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Redefinition by Events Attribute Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+  </xs:redefine>
+
+  <xs:include schemaLocation="xhtml-ssismap-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+       Server-side image maps module
+
+        Attributes defined here:
+          * ismap on img
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_servermapmodule"/>
+    </xs:annotation>
+ </xs:include>
+
+  <xs:redefine schemaLocation="xhtml-object-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Object module
+
+        Elements defined here:
+          * object
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_objectmodule"/>
+    </xs:annotation>
+    <xs:attributeGroup name="object.attlist">
+       <xs:attributeGroup ref="object.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Object Attlist
+              </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="object.csim.attlist">
+           <xs:annotation>
+              <xs:documentation>
+                Redefinition by Client Image Map Module
+              </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+  </xs:redefine>
+
+  <xs:include schemaLocation="xhtml-param-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Param module
+
+        Elements defined here:
+          * param
+      </xs:documentation>
+    </xs:annotation>
+  </xs:include>
+-->
+  <xs:include schemaLocation="XHTML/xhtml-table-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Tables module
+
+        Elements defined here:
+          * table, caption, thead, tfoot, tbody, colgroup, col, tr, th, td
+      </xs:documentation>
+      <xs:documentation source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_tablemodule"/>
+    </xs:annotation>
+  </xs:include>
+<!--
+  <xs:redefine schemaLocation="xhtml-form-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Forms module
+
+        Elements defined here:
+          * form, label, input, select, optgroup, option,
+          * textarea, fieldset, legend, button
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_extformsmodule"/>
+    </xs:annotation>
+    <xs:attributeGroup name="form.attlist">
+       <xs:annotation>
+          <xs:documentation>
+            Changes to XHTML Form Attlist
+          </xs:documentation>
+       </xs:annotation>
+       <xs:attributeGroup ref="form.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Form Attributes (declared in Forms Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="form.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                XHTML Events Module - Attribute additions
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+
+    <xs:attributeGroup name="input.attlist">
+       <xs:annotation>
+          <xs:documentation>
+            Changes to XHTML Form Input Element
+          </xs:documentation>
+       </xs:annotation>
+       <xs:attributeGroup ref="input.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Input Attributes (in Forms Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="input.csim.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Redefinition by Client Side Image Map Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="input.ssimap.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Redefinition by Server Side Image Map Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="input.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+               Redefinition by Event Attribute Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+
+
+    <xs:attributeGroup name="label.attlist">
+       <xs:attributeGroup ref="label.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Label Attributes (in Forms Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="label.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+               Redefinition by Event Attribute Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+
+    <xs:attributeGroup name="select.attlist">
+       <xs:attributeGroup ref="select.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Select Attributes (in Forms Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="select.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+               Redefinition by Event Attribute Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+
+    <xs:attributeGroup name="textarea.attlist">
+       <xs:attributeGroup ref="textarea.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original TextArea Attributes (in Forms Module)
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+       <xs:attributeGroup ref="textarea.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+               Redefinition by Event Attribute Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+
+    <xs:attributeGroup name="button.attlist">
+       <xs:attributeGroup ref="button.attlist">
+           <xs:annotation>
+             <xs:documentation>
+                Original Button Attributes (in Forms Module)
+             </xs:documentation>
+           </xs:annotation>
+        </xs:attributeGroup>
+        <xs:attributeGroup ref="button.events.attlist">
+           <xs:annotation>
+             <xs:documentation>
+               Redefinition by Event Attribute Module
+             </xs:documentation>
+           </xs:annotation>
+       </xs:attributeGroup>
+    </xs:attributeGroup>
+  </xs:redefine>
+
+  <xs:include schemaLocation="xhtml-ruby-basic-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        Ruby module
+
+        Elements defined here:
+          * ruby, rbc, rtc, rb, rt, rp
+
+        Note that either Ruby or Basic Ruby should be used but not both
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/2001/REC-ruby-20010531/#simple-ruby1"/>
+    </xs:annotation>
+  </xs:include>
+
+  <xs:include schemaLocation="xhtml-events-1.xsd">
+    <xs:annotation>
+      <xs:documentation>
+        XHTML Events Modules
+
+        Attributes defined here:
+          XHTML Event Types
+      </xs:documentation>
+      <xs:documentation
+         source="http://www.w3.org/TR/xhtml-modularization/abstract_modules.html#s_intrinsiceventsmodule"/>
+    </xs:annotation>
+  </xs:include>
+-->
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11.xsd
new file mode 100644
index 00000000000..bc4e5b010e9
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi-xhtml11.xsd
@@ -0,0 +1,115 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2009 DDI Alliance, DDI 3.1, 2009-10-18
+
+This file is part of DDI 3.1 XML Schema.
+
+DDI 3.1 XML Schema is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation, either version 3 of the License, or (at your
+option) any later version.
+
+DDI 3.1 XML Schema is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+for more details.
+
+You should have received a copy of the GNU Lesser General Public License along
+with DDI 3.1 XML Schema. If not, see <http://www.gnu.org/licenses/>.
+
+-->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns="http://www.w3.org/1999/xhtml" targetNamespace="http://www.w3.org/1999/xhtml" blockDefault="#all">
+   <xs:annotation>
+      <xs:documentation>
+      This is the XML Schema driver for XHTML 1.1.
+      Please use this namespace for XHTML elements:
+
+         "http://www.w3.org/1999/xhtml"
+
+      $Id: ddi-xhtml11.xsd,v 1.1 2007/02/08 16:03:11 jgager Exp $
+    </xs:documentation>
+      <xs:documentation source="XHTML/xhtml-copyright-1.xsd"/>
+   </xs:annotation>
+   <xs:annotation>
+      <xs:documentation>
+      This is XHTML, a reformulation of HTML as a modular XML application
+      The Extensible HyperText Markup Language (XHTML)
+      Copyright ©1998-2003 World Wide Web Consortium
+      (Massachusetts Institute of Technology, Institut National de
+      Recherche en Informatique et en Automatique, Keio University).
+      All Rights Reserved.
+
+      Permission to use, copy, modify and distribute the XHTML Schema
+      modules and their accompanying xs:documentation for any purpose
+      and without fee is hereby granted in perpetuity, provided that the above
+      copyright notice and this paragraph appear in all copies.
+      The copyright holders make no representation about the suitability of
+      these XML Schema modules for any purpose.
+
+      They are provided "as is" without expressed or implied warranty.
+    </xs:documentation>
+   </xs:annotation>
+   <xs:annotation>
+      <xs:documentation>
+      This is the Schema Driver file for XHTML1.1
+      Document Type
+
+     This schema
+        + imports external schemas (xml.xsd)
+        + redefines (and includes) schema modules for XHTML1.1 Document Type.
+        + includes Schema for Named content model for the
+          XHTML1.1 Document Type
+
+        XHTML1.1 Document Type includes the following Modules
+           XHTML Core modules (Required for XHTML Family Conformance)
+            +  text
+            +  hypertext
+            +  lists
+            +  structure
+           Other XHTML modules
+            +  Edit
+            +  Bdo
+            +  Presentational
+            +  Link
+            +  Meta
+            +  Base
+            +  Scripting
+            +  Style
+            +  Image
+            +  Applet
+            +  Object
+            +  Param (Applet/Object modules require Param Module)
+            +  Tables
+            +  Forms
+            +  Client side image maps
+            +  Server side image maps
+            +  Ruby
+    </xs:documentation>
+   </xs:annotation>
+   <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="xml.xsd">
+      <xs:annotation>
+         <xs:documentation>
+         This import brings in the XML namespace attributes
+         The XML attributes are used by various modules.
+       </xs:documentation>
+      </xs:annotation>
+   </xs:import>
+   <xs:include schemaLocation="ddi-xhtml11-model-1.xsd">
+      <xs:annotation>
+         <xs:documentation>
+        Document Model module for the XHTML1.1 Document Type.
+        This schema file defines all named models used by XHTML
+        Modularization Framework for XHTML1.1 Document Type
+      </xs:documentation>
+      </xs:annotation>
+   </xs:include>
+   <xs:include schemaLocation="ddi-xhtml11-modules-1.xsd">
+      <xs:annotation>
+         <xs:documentation>
+        Schema that includes all modules (and redefinitions)
+        for XHTML1.1 Document Type.
+      </xs:documentation>
+      </xs:annotation>
+   </xs:include>
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi_codebook_2_5.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi_codebook_2_5.xsd
new file mode 100644
index 00000000000..7f81a1bb327
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/ddi_codebook_2_5.xsd
@@ -0,0 +1,8482 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Copyright (c) 2014 DDI Alliance, DDI 2.5, 2014-01-28
+
+This file is part of DDI 2.5 XML Schema.
+
+DDI 2.5 XML Schema is free software: you can redistribute it and/or modify
+it under the terms of the GNU Lesser General Public License as published by the
+Free Software Foundation, either version 3 of the License, or (at your
+option) any later version.
+
+DDI 2.5 XML Schema is distributed in the hope that it will be useful, but
+WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
+for more details.
+
+You should have received a copy of the GNU Lesser General Public License along
+with DDI 2.5 XML Schema. If not, see <http://www.gnu.org/licenses/>.
+
+-->
+<!--
+DDI 2.5.1 XML Schema modifies DDI 2.5 by easing cardinality on many text 
+entries to better support multi-language content. It provides the element 
+dataFingerprint within fileTxt as a sub-minor, backward compatible bug correction. 
+This element was intended to be included in DDI 2.5. Documentation of new objects
+was expanded.
+
+These changes do not affect the namespace. 2013-11-14.
+
+-->
+<xs:schema xmlns:xhtml="http://www.w3.org/1999/xhtml" xmlns:fn="http://www.w3.org/2005/xpath-functions" xmlns:xs="http://www.w3.org/2001/XMLSchema" xmlns:saxon="http://xml.apache.org/xslt" xmlns="ddi:codebook:2_5" xmlns:dc="http://purl.org/dc/terms/" targetNamespace="ddi:codebook:2_5" elementFormDefault="qualified" attributeFormDefault="unqualified">
+   <xs:annotation>
+      <xs:documentation>
+         This is a W3C Schema "Technical Implementation" of the DDI Conceptual Specification. 
+         This schema is intended for use in producing electronic versions of codebooks for quantitative social science data.
+         Please note that the attribute xml-lang in the a.globals group is an error that was persisted to retain backward compatibility. DO NOT USE THIS ATTRIBUTE. If this attribute has been used, transfer the content to xml:lang.
+   	  </xs:documentation>
+   </xs:annotation>
+
+   <xs:import namespace="http://www.w3.org/XML/1998/namespace" schemaLocation="xml.xsd"/>
+   <xs:import namespace="http://www.w3.org/1999/xhtml" schemaLocation="ddi-xhtml11.xsd"/>
+   <xs:import namespace="http://purl.org/dc/terms/" schemaLocation="dcterms.xsd"/>
+   
+   <!-- Base Structures -->
+   
+   <xs:attributeGroup name="GLOBALS">
+      <xs:attribute name="ID" type="xs:ID" use="optional"/>
+      <xs:attribute name="xml-lang" type="xs:NMTOKEN" use="optional">
+         <xs:annotation>
+            <xs:documentation>DO NOT USE THIS ATTRIBUTE. Its inclusion is an error that was persisted to retain backward compatibility. If this attribute has been used, transfer the content to xml:lang.</xs:documentation>
+         </xs:annotation>
+      </xs:attribute>
+      <xs:attribute ref="xml:lang" use="optional"/>
+      <xs:attribute name="source" default="producer">
+         <xs:simpleType>
+            <xs:restriction base="xs:NMTOKEN">
+               <xs:enumeration value="archive"/>
+               <xs:enumeration value="producer"/>
+            </xs:restriction>
+         </xs:simpleType>
+      </xs:attribute>
+      <xs:attribute name="elementVersion" type="xs:string" use="optional">
+	  	<xs:annotation>
+			<xs:documentation>Captures version of the element</xs:documentation>
+		</xs:annotation>
+	  </xs:attribute>
+      <xs:attribute name="elementVersionDate" type="dateSimpleType" use="optional">
+	  	<xs:annotation>
+			<xs:documentation>Indicates version date for the element. Use YYYY-MM-DD, YYYY-MM, or YYYY formats.</xs:documentation>
+		</xs:annotation>
+	  </xs:attribute>
+      <xs:attribute name="ddiLifecycleUrn" type="xs:anyURI" use="optional">
+         <xs:annotation>
+            <xs:documentation>Used to capture the DDI-Lifecycle type URN for the element. This may be captured during translation from DDI-Lifecycle to DDI-Codebook structure or in preparation for transferring to a DDI-Lifecycle structure.</xs:documentation>
+         </xs:annotation>
+      </xs:attribute>
+      <xs:attribute name="ddiCodebookUrn" type="xs:anyURI" use="optional">
+         <xs:annotation>
+            <xs:documentation>Used to capture the DDI-Codebook type URN for the element. This is used to assign a DDI-Codebook specific URN to the element, according to the format prescribed by the DDI-Codebook standard.</xs:documentation>
+         </xs:annotation>
+      </xs:attribute>
+   </xs:attributeGroup>
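+
+   <!--
+      Illustration only (a hedged sketch, not normative): in an instance document, an element
+      whose type carries the GLOBALS attribute group could use these attributes as follows,
+      assuming the default namespace is bound to ddi:codebook:2_5 and using an arbitrary
+      element name for the example:
+
+        <titl ID="T1" xml:lang="en" source="producer" elementVersion="1.0"
+              elementVersionDate="2014-01-28">Example Study Title</titl>
+
+      Per the annotation above, use xml:lang rather than the deprecated xml-lang attribute.
+   -->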
+   
+   <xs:complexType name="baseElementType" abstract="true">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Base Element Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type forms the basis for all elements. Every element may contain the attributes defined in the GLOBALS attribute group.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:attributeGroup ref="GLOBALS"/>
+   </xs:complexType>
+   
+   <xs:complexType name="abstractTextType" mixed="true" abstract="true">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Abstract Text Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type forms the basis for all textual elements. Textual elements may contain text or a mix of select elements. This type is abstract and is refined by more specific types which will limit the allowable elements and attributes. Any textual element will be a subset of this type and can be processed as such.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>         
+         </xs:documentation>
+      </xs:annotation>
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:any namespace="##targetNamespace http://www.w3.org/1999/xhtml" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:anyAttribute namespace="##local"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="simpleTextType" mixed="true">
+         <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Simple Text Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type forms the basis of most textual elements. Elements using this type may have mixed content (text and child elements). The child elements are from the PHRASE, FORM, and xhtml:BlkNoForm.mix (a specific subset of XHTML) content groups. Note that elements from the PHRASE and FORM groups must not be used together with elements from the xhtml:BlkNoForm.mix group; one can use either elements from xhtml:BlkNoForm.mix or elements from the PHRASE and FORM groups. This type is extended in some cases to include additional attributes.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>  
+         </xs:documentation>
+      </xs:annotation>
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>         
+                  <xs:group ref="xhtml:BlkNoForm.mix"/>
+               </xs:choice>            
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
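+
+   <!--
+      Illustration only (a hedged sketch): mixed content of simpleTextType can combine plain
+      text with elements from the PHRASE group, for example (the element name "txt" is an
+      assumption made for the example):
+
+        <txt>See the <ExtLink URI="https://example.org/notes.html">processing notes</ExtLink>
+        for details on recoding.</txt>
+   -->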
+   
+   <xs:complexType name="conceptualTextType" mixed="true">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Conceptual Text Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type forms the basis for a textual element which may also provide a conceptual description (see concept) of the element and a longer description (see txt). If the concept and/or txt elements are used, then the element should contain no other child elements or text. Note that elements from the PHRASE and FORM groups must not be used together with elements from the xhtml:BlkNoForm.mix group; one can use either elements from xhtml:BlkNoForm.mix or elements from the PHRASE and FORM groups.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>          
+         </xs:documentation>
+      </xs:annotation>
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>         
+                  <xs:group ref="xhtml:BlkNoForm.mix"/>
+                  <xs:element ref="concept"/>
+                  <xs:element ref="txt"/>
+               </xs:choice>               
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="tableAndTextType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>         
+                  <xs:group ref="xhtml:BlkNoForm.mix"/>
+                  <xs:element ref="table"/>               
+               </xs:choice>               
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="materialReferenceType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>
+                  <xs:group ref="xhtml:BlkNoForm.mix"/>
+                  <xs:element ref="citation"/>               
+               </xs:choice>               
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="simpleTextAndDateType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="date" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="phraseType" mixed="true">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Phrase Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type restricts the simpleTextType to allow for only child elements from the PHRASE content group. It still allows for mixed content (text and child elements).</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:complexContent>
+         <xs:restriction base="simpleTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+               </xs:choice>               
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="stringType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">String Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type restricts the base abstractTextType to only allow for text (i.e. no child elements).</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:simpleContent>
+         <xs:restriction base="abstractTextType">
+            <xs:simpleType>
+               <xs:restriction base="xs:string"/>
+            </xs:simpleType>
+         </xs:restriction>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <xs:complexType name="integerType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Integer Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type restricts the base abstractTextType to only allow for an integer as text content. No child elements are allowed.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:simpleContent>
+         <xs:restriction base="abstractTextType">
+            <xs:simpleType>
+               <xs:restriction base="xs:integer"/>
+            </xs:simpleType>
+         </xs:restriction>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <xs:simpleType name="dateSimpleType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Date Simple Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This simple type is a union of the various XML Schema date formats. Using this type, a date can be expressed as a year (YYYY), a year and month (YYYY-MM), a date (YYYY-MM-DD) or a complete date and time (YYYY-MM-DDThh:mm:ss). All of these formats allow for an optional timezone offset to be specified.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:union memberTypes="xs:dateTime xs:date xs:gYearMonth xs:gYear"/>
+   </xs:simpleType>
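+   <!-- Non-normative examples of values accepted by dateSimpleType (added for clarity):
+        2004, 2004-06, 2004-06-15, and 2004-06-15T13:45:30 are all valid, and each form may
+        carry an optional timezone offset such as Z or +01:00 (e.g. 2004-06-15T13:45:30Z). -->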
+   
+   <xs:complexType name="dateType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Date Type</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This type restricts the base abstractTextType to allow for only the union of types defined in dateSimpleType as text content. No child elements are allowed.</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:simpleContent>
+         <xs:restriction base="abstractTextType">
+            <xs:simpleType>
+               <xs:restriction base="dateSimpleType"/>
+            </xs:simpleType>
+         </xs:restriction>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <!-- PHRASE Elements -->
+   
+   <xs:group name="PHRASE">
+      <xs:choice>
+         <xs:element ref="ExtLink" minOccurs="0" maxOccurs="unbounded"/>
+         <xs:element ref="Link" minOccurs="0" maxOccurs="unbounded"/>
+      </xs:choice>
+   </xs:group>
+
+   <xs:complexType name="ExtLinkType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="URI" type="xs:string" use="required"/>
+            <xs:attribute name="role" type="xs:string"/>
+            <xs:attribute name="title" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="ExtLink" type="ExtLinkType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">External Link</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element permits encoders to provide links from any arbitrary element containing ExtLink as a subelement to electronic resources outside the codebook.</xhtml:div>
+               </xhtml:div>
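+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; the URI and title values are placeholders, not taken from the DDI specification. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <ExtLink URI="https://example.org/reports/methodology.pdf" title="Methodology report"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>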
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="LinkType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="refs" type="xs:IDREFS" use="required"/>
+            <xs:attribute name="role" type="xs:string"/>
+            <xs:attribute name="title" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="Link" type="LinkType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Link</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element permits encoders to provide links from any arbitrary element containing Link as a subelement to other elements in the codebook.</xhtml:div>
+               </xhtml:div>
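+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; "V102" is a placeholder for the ID of another element in the same codebook. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <Link refs="V102" title="Related variable">see the derived variable V102</Link>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>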
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <!-- FORM Elements -->
+   
+   <xs:group name="FORM">
+      <xs:choice>
+         <xs:element ref="div"/>
+         <xs:element ref="emph"/>
+         <xs:element ref="head"/>
+         <xs:element ref="hi"/>
+         <xs:element ref="list"/>
+         <xs:element ref="p"/>
+      </xs:choice>
+   </xs:group>
+   
+   <xs:complexType name="formType" mixed="true" abstract="true">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Form Type</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This type defines the basis for all elements in the FORM content group. This is derived from the abstractTextType. The content may still be mixed (text and child elements), but the child elements are restricted to be those from the PHRASE and FORM content groups, or the itm and label elements. Further, the possible attributes are restricted. This type is abstract, so specific form elements will further refine this type, but all elements in the FORM content group will conform to this structure and may be processed as such.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>
+                  <xs:element ref="itm"/>
+                  <xs:element ref="label"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="n" type="xs:string"/>
+            <xs:attribute name="rend" type="xs:string"/>
+            <xs:attribute name="type" type="xs:string"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:complexType name="divType">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="FORM"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="prohibited"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="div" type="divType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Division</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: marks a subdivision in a text.</xhtml:div>
+               </xhtml:div>
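+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; the heading and paragraph text are invented. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <div>
+                           <head>Sampling Procedure</head>
+                           <p>A two-stage stratified sample of households was drawn.</p>
+                        </div>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>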
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="emphType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:element ref="hi"/>
+                  <xs:element ref="list"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="prohibited"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="emph" type="emphType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Emphasis</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: marks words or phrases that are emphasized for rhetorical effect.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="headType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:element ref="emph"/>
+                  <xs:element ref="hi"/>
+                  <xs:element ref="list"/>
+               </xs:choice>               
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="head" type="headType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Head</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: marks off a heading to a division, list, etc. </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="hiType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:element ref="emph"/>
+                  <xs:element ref="list"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="prohibited"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="hi" type="hiType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Highlight</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: marks a word or phrase as graphically distinct from the surrounding text, while making no claim for the reasons.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="listType">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:element ref="itm"/>
+                  <xs:element ref="label"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="type" default="simple">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="ordered"/>
+                     <xs:enumeration value="bulleted"/>
+                     <xs:enumeration value="simple"/>
+                     <xs:enumeration value="gloss"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="list" type="listType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">List</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: contains any sequence of items (entries) organized as a list.</xhtml:div>
+               </xhtml:div>
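+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; shows a gloss-type list of label/itm pairs with invented content. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <list type="gloss">
+                           <label>PSU</label>
+                           <itm>Primary sampling unit</itm>
+                           <label>SSU</label>
+                           <itm>Secondary sampling unit</itm>
+                        </list>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>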
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="itmType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:element ref="emph"/>
+                  <xs:element ref="hi"/>
+                  <xs:element ref="list"/>
+                  <xs:element ref="p"/>
+                  <xs:element ref="label"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="prohibited"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="itm" type="itmType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Item</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: marks entries (items) in a list.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="labelType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:element ref="emph"/>
+                  <xs:element ref="hi"/>               
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="prohibited"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="label" type="labelType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Label</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Formatting element: contains the label associated with an item in a list; in glossaries, marks the term being defined. </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Label</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">A short description of the parent element. Attribute "level" indicates the level to which the element applies (variable group, nCube group, variable, etc.). The "vendor" attribute allows for specification of different labels for use with different vendors' software. Attribute "country" allows specification of a different label by country for the same element to which it applies. Attribute "sdatrefs" allows pointing to specific dates, universes, or other information encoded in the study description. The attributes "country" and "sdatrefs" are intended to cover instances of comparative data, by retaining consistency in some elements over time and geography, but altering, as appropriate, information pertaining to date, language, and/or location.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <recGrp rectype="A" keyvar="H-SEQ" recidvar="PRECORD"> 
+                           <labl>Person (A) Record</labl> 
+                        </recGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp>
+                           <labl>Study Procedure Information</labl>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp>
+                           <labl>Political Involvement and National Goals</labl>
+                        </varGrp>
+                     ]]></xhtml:samp> 
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp>
+                           <labl>Household Variable Section</labl>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp>
+                           <labl>Sex by Work Experience in 1999 by Income in 1999</labl>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp>
+                           <labl>Tenure by Age of Householder</labl>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <labl>Why No Holiday-No Money</labl>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgryGrp>
+                           <labl>Other Agricultural and Related Occupations</labl>
+                        </catgryGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry>
+                           <labl>Better</labl>
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry>
+                           <labl>About the same</labl> 
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry>
+                           <labl>Inap.</labl> 
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <labl>Age by Sex by Poverty Status</labl>
+                        </nCube>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <otherMat type="SAS data definition statements" level="study" URI="http:// www.icpsr.umich.edu">
+                           <labl>SAS Data Definition Statements for  ICPSR 6837</labl>
+                        </otherMat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="pType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="formType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:element ref="emph"/>
+                  <xs:element ref="hi"/>
+                  <xs:element ref="list"/>
+               </xs:choice>               
+            </xs:sequence>
+               <xs:attribute name="type" type="xs:string" use="prohibited"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="p" type="pType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Paragraph</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Marks a paragraph.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <!-- Codebook Elements -->
+   
+   <xs:complexType name="abstractType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="contentType" use="optional">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="abstract"/>
+                     <xs:enumeration value="purpose"/>
+                     <xs:enumeration value="mixed"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="abstract" type="abstractType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Abstract</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">An unformatted summary describing the purpose, nature, and scope of the data collection, special characteristics of its contents, major subject areas covered, and what questions the PIs attempted to answer when they conducted the study. A listing of major variables in the study is important here. In cases where a codebook contains more than one abstract (for example, one might be supplied by the data producer and another prepared by the data archive where the data are deposited), the "source" and "date" attributes may be used to distinguish the abstract versions. Maps to Dublin Core Description element. Inclusion of this element in the codebook is recommended. The "date" attribute should follow ISO convention of YYYY-MM-DD. The contentType attribute provides forward-compatibility with DDI 3 by describing where the content fits in that structure, or if is mixed in terms of what is contained.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <abstract date="1999-01-28" source="ICPSR"> Data on labor force activity for the week prior to the survey are supplied in this collection. Information is available on the employment status, occupation, and industry of persons 15 years old and over. Demographic variables such as age, sex, race, marital status, veteran status, household relationship, educational background, and Hispanic origin are included. In addition to providing these core data, the May survey also contains a supplement on work schedules for all applicable persons aged 15 years and older who were employed at the time of the survey. This supplement focuses on shift work, flexible hours, and work at home for both main and second jobs.</abstract>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="accsPlacType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="URI" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="accsPlac" type="accsPlacType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Location of Data Collection</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Location where the data collection is currently stored. Use the URI attribute to provide a URN or URL for the storage site or the actual address from which the data may be downloaded.</xhtml:div>
+               </xhtml:div>
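+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; the archive name and URL are placeholders. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <accsPlac URI="https://example.org/archive/studies/0001">Example Social Science Data Archive</accsPlac>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>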
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="actMin" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Actions to Minimize Losses</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Summary of actions taken to minimize data loss. Includes information on actions such as follow-up visits, supervisory checks, historical matching, estimation, etc.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <actMin>To minimize the number of unresolved cases and reduce the potential nonresponse bias, four follow-up contacts were made with agencies that had not responded by various stages of the data collection process.</actMin>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="altTitl" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Alternative Title</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">A title by which the work is commonly referred, or an abbreviation of the title.</xhtml:div>
+               </xhtml:div>
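+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; the abbreviated study title is invented. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <altTitl>EHS 2020</altTitl>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>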
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="anlyInfoType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="respRate" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="EstSmpErr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataAppr" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="anlyInfo" type="anlyInfoType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Appraisal</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information on data appraisal.</xhtml:div>
+               </xhtml:div>
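+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Non-normative, illustrative sample added for clarity; the figures are invented and only show the allowed child elements in their declared order. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <anlyInfo>
+                           <respRate>Of the 1,500 sampled households, 1,230 completed the interview, for a response rate of 82 percent.</respRate>
+                           <EstSmpErr>Estimates based on the full sample are subject to a sampling error of approximately plus or minus 3 percentage points.</EstSmpErr>
+                           <dataAppr>Consistency checks were run to identify undocumented codes and out-of-range values.</dataAppr>
+                        </anlyInfo>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>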
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="anlyUnitType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="conceptualTextType">
+            <xs:attribute name="unit" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="anlyUnit" type="anlyUnitType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Unit of Analysis</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Basic unit of analysis or observation that the file describes: individuals, families/households, groups, institutions/organizations, administrative units, etc. The "unit" attribute is included to permit the development of a controlled vocabulary for this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <anlyUnit>individuals</anlyUnit>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+     
+   <xs:element name="anlysUnit" type="conceptualTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Analysis Unit</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides information regarding whom or what the variable/nCube describes. The element may be repeated only to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <anlysUnit>This variable reports election returns at the constituency level.</anlysUnit>
+                        </var>
+                     ]]></xhtml:samp> 
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <anlysUnit>Household</anlysUnit>
+                        </nCube>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="AuthEntyType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="affiliation" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="AuthEnty" type="AuthEntyType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Authoring Entity/Primary Investigator</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>The person, corporate body, or agency responsible for the work's substantive and intellectual content. Repeat the element for each author, and use "affiliation" attribute if available. Invert first and last name and use commas. Author of data collection (codeBook/stdyDscr/citation/rspStmt/AuthEnty) maps to Dublin Core Creator element. Inclusion of this element in codebook is recommended. </xhtml:p>
+                     <xhtml:p>The "author" in the Document Description should be the individual(s) or organization(s) directly responsible for the intellectual content of the DDI version, as distinct from the person(s) or organization(s) responsible for the intellectual content of the earlier paper or electronic edition from which the DDI edition may have been derived.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <AuthEnty>United States Department of Commerce. Bureau of the Census</AuthEnty>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <AuthEnty affiliation="European Commission">Rabier, Jacques-Rene</AuthEnty>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+ 
+   <xs:element name="avlStatus" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Availability Status</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Statement of collection availability. An archive may need to indicate that a collection is unavailable because it is embargoed for a period of time, because it has been superseded, because a new edition is imminent, etc. It is anticipated that a controlled vocabulary will be developed for this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <avlStatus>This collection is superseded by CENSUS OF POPULATION, 1880 [UNITED STATES]: PUBLIC USE SAMPLE (ICPSR 6460).</avlStatus>
+                     ]]></xhtml:samp>
+                 </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="backwardType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="qstn" type="xs:IDREFS" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="backward" type="backwardType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Backflow</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Contains a reference to IDs of possible preceding questions. The "qstn" IDREFS may be used to specify the question IDs.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <backward qstn="Q12 Q13 Q14 Q15">For responses on a similar topic, see questions 12-15.</backward>
+                           </qstn> 
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <backward qstn="Q143"/>
+                           </qstn> 
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="biblCitType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="format" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="biblCit" type="biblCitType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Bibliographic Citation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Complete bibliographic reference containing all of the standard elements of a citation that can be used to cite the work. The "format" attribute is provided to enable specification of the particular citation style used, e.g., APA, MLA, Chicago, etc.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <biblCit format="MRDF">Rabier, Jacques-Rene, and Ronald Inglehart. EURO-BAROMETER 11: YEAR OF  THE CHILD IN EUROPE, APRIL 1979 [Codebook file]. Conducted by Institut Francais D'Opinion Publique (IFOP), Paris, et al. ICPSR ed. Ann Arbor, MI: Inter-university Consortium for Political and Social Resarch [producer and distributor], 1981.</biblCit>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="boundPolyType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="polygon" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="boundPoly" type="boundPolyType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Geographic Bounding Polygon</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>This field allows the creation of multiple polygons to describe in a more detailed manner the geographic area covered by the dataset. It should only be used to define the outer boundaries of a covered area. For example, in the United States, such polygons can be created to define boundaries for Hawaii, Alaska, and the continental United States, but not interior boundaries for the contiguous states. This field is used to refine a coordinate-based search, not to actually map an area. </xhtml:p>
+                     <xhtml:p>If the boundPoly element is used, then geoBndBox MUST be present, and all points enclosed by the boundPoly MUST be contained within the geoBndBox. Elements westBL, eastBL, southBL, and northBL of the geoBndBox should each be represented in at least one point of the boundPoly description. </xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geogCover>Nevada State</geogCover>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <boundPoly>
+                           <polygon>
+                              <point>
+                                 <gringLat>42.002207</gringLat>
+                                 <gringLon>-120.005729004</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>42.002207</gringLat>
+                                 <gringLon>-114.039663</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>35.9</gringLat>
+                                 <gringLon>-114.039663</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>36.080</gringLat>
+                                 <gringLon>-114.544</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>35.133</gringLat>
+                                 <gringLon>-114.542</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>35.00208499998</gringLat>
+                                 <gringLon>-114.63288</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>35.00208499998</gringLat>
+                                 <gringLon>-114.63323</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>38.999</gringLat>
+                                 <gringLon>-120.005729004</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>42.002207</gringLat>
+                                 <gringLon>-120.005729004</gringLon>
+                              </point>
+                           </polygon>
+                        </boundPoly>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geogCover>Norway</geogCover>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <boundPoly>
+                           <polygon>
+                              <point>
+                                 <gringLat>80.76416</gringLat>
+                                 <gringLon>33.637497</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>80.76416</gringLat>
+                                 <gringLon>10.2</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>62.48395</gringLat>
+                                 <gringLon>4.789583</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>57.987915</gringLat>
+                                 <gringLon>4.789583</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>57.987915</gringLat>
+                                 <gringLon>11.8</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>61.27794</gringLat>
+                                 <gringLon>13.2336</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>63.19012</gringLat>
+                                 <gringLon>13.2336</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>67.28615</gringLat>
+                                 <gringLon>17.24580</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>68.14297</gringLat>
+                                 <gringLon>21.38362</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>68.14297</gringLat>
+                                 <gringLon>25.50054</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>69.39685</gringLat>
+                                 <gringLon>27.38137</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>68.76991</gringLat>
+                                 <gringLon>28.84424</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>68.76991</gringLat>
+                                 <gringLon>31.31021</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>71.42</gringLat>
+                                 <gringLon>31.31021</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>71.42</gringLat>
+                                 <gringLon>33.637497</gringLon>
+                              </point>
+                              <point>
+                                 <gringLat>80.76416</gringLat>
+                                 <gringLon>33.637497</gringLon>
+                              </point>
+                           </polygon>
+                        </boundPoly>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="caseQnty" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Number of cases / Record Quantity</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Number of cases or observations.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <caseQnty>1011</caseQnty>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="catStatType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" default="freq">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="freq"/>
+                     <xs:enumeration value="percent"/>
+                     <xs:enumeration value="crosstab"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherType" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="URI" type="xs:string" use="optional"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="wgtd" default="not-wgtd">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="wgtd"/>
+                     <xs:enumeration value="not-wgtd"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="wgt-var" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="weight" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="catStat" type="catStatType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Category Level Statistic</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">May include frequencies, percentages, or crosstabulation results. This field can contain one of the following: 1. textual information (e.g., PCDATA), or 2. non-parseable character data (e.g., the statistics), or 3. some other form of external information (table, image, etc.) In case 1, the tag can be used to mark up character data; tables can also be included in the actual markup. In cases 2 or 3, the element can be left empty and the "URI" attribute used to refer to the external object containing the information. The attribute "type" indicates the type of statistics presented - frequency, percent, or crosstabulation. If a value of "other" is used for this attribute, the "otherType" attribute should take a value from a controlled vocabulary. This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <catgryGrp>
+                              <catStat type="freq">256</catStat>
+                           </catgryGrp>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="catValu" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Category Value</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The explicit response.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <catgry missing="Y" missType="inap">
+                              <catValu>9</catValu> 
+                           </catgry>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="catLevelType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="levelnm" type="xs:string"/>
+            <xs:attribute name="geoMap" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="catLevel" type="catLevelType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Category Level</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Used to describe the levels of the category hierarchy. Note that we do not indicate nesting levels or roll-up structures here. This is done to be able to support ragged hierarchies. A category level may be linked to one or more maps of the variable content. This id done by referencing the IDs of the appropriate geoMap elements in the attribute <xhtml:code>geoMap</xhtml:code>.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catlevel ID="Level1" levelnm="Broader sectors"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catlevel ID="Level2" levelnm="Narrower sectors"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catlevel ID="Level3" levelnm="Occupations" geoMap="GEO_1 GEO_2"/>
+                     ]]></xhtml:samp>                  
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="catgryType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="catValu" minOccurs="0"/>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="catStat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="mrow" minOccurs="0"/>
+            </xs:sequence>
+            <xs:attribute name="missing" default="N">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="Y"/>
+                     <xs:enumeration value="N"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="missType" type="xs:string" use="optional"/>
+            <xs:attribute name="country" type="xs:string" use="optional"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="excls" default="true">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="true"/>
+                     <xs:enumeration value="false"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="catgry" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="level" type="xs:IDREF" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="catgry" type="catgryType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Category</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>A description of a particular response.</xhtml:p>
+                     <xhtml:p>The attribute "missing" indicates whether this category group contains missing data or not. </xhtml:p>
+                     <xhtml:p>The attribute "missType" is used to specify the type of missing data, e.g., inap., don't know, no answer, etc. </xhtml:p>
+                     <xhtml:p>The attribute "country" allows for the denotation of country-specific category values.</xhtml:p>
+                     <xhtml:p>The "sdatrefs" attribute records the ID values of all elements within the  summary data description that apply to this category. </xhtml:p>
+                     <xhtml:p>The exclusiveness attribute ("excls") should be set to "false" if the category can appear in more than one place in the classification hierarchy.</xhtml:p>
+                     <xhtml:p>The attribute "catgry" is an IDREF referencing any child categories of this category element. Used to capture nested hierarchies of categories.</xhtml:p>
+                     <xhtml:p>The attribute "level" is an IDREF referencing the catLevel ID in which this category exists.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catlevel ID="Level1" levelnm="Broader sectors"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catlevel ID="Level2" levelnm="Narrower sectors"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catlevel ID="Level3" levelnm="Occupations"/>
+                     ]]></xhtml:samp>
+                     <!-- ... -->
+                  <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry ID="C1" catgry="C2" Level="Level1">
+                           <catValu>0</catValu>
+                           <labl>Management, professional and related occupations</labl>
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry ID="C2" catgry="C3, C4" Level="Level2">
+                           <catValu>01</catValu>
+                           <labl>Management occupations</labl>
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry ID="C3" Level="Level3">
+                           <catValu>011</catValu>
+                           <labl>Top executives</labl>
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry ID="C4" Level="Level3">
+                           <catValu>012</catValu>
+                           <labl>Financial managers</labl>
+                        </catgry>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="catgryGrpType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="catStat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="missing" default="N">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="Y"/>
+                     <xs:enumeration value="N"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="missType" type="xs:string" use="optional"/>
+            <xs:attribute name="catgry" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="catGrp" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="levelno" type="xs:string" use="optional"/>
+            <xs:attribute name="levelnm" type="xs:string" use="optional"/>
+            <xs:attribute name="compl" default="true">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="true"/>
+                     <xs:enumeration value="false"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="excls" default="true">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="true"/>
+                     <xs:enumeration value="false"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="catgryGrp" type="catgryGrpType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Category Group</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>A description of response categories that might be grouped together.</xhtml:p>
+                     <xhtml:p>The attribute "missing" indicates whether this category group contains missing data or not.</xhtml:p>
+                     <xhtml:p>The attribute "missType" is used to specify the type of missing data, e.g., inap., don't know, no answer, etc.</xhtml:p>
+                     <xhtml:p>The attribute "catGrp" is used to indicate all the subsidiary category groups which nest underneath the current category group. This allows for the encoding of a hierarchical structure of category groups.</xhtml:p>
+                     <xhtml:p>The "levelno" attribute allows the addition of a level number, and "levelnm" allows the addition of a level name to the category group.</xhtml:p>
+                     <xhtml:p>The completeness attribute ("compl") should be set to "false" if the category group is incomplete (not a complete aggregate of all sub-nodes or children).</xhtml:p>
+                     <xhtml:p>The exclusiveness attribute ("excls") should be set to "false" if the category group can appear in more than one place in the classification hierarchy.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
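+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only, not taken from the DDI documentation: a category group collecting the categories C3 and C4 from the catgry examples above; the ID "CG1" is hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgryGrp ID="CG1" missing="N" catgry="C3 C4">
+                           <labl>Management occupations</labl>
+                        </catgryGrp>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>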
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="citReq" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Citation Requirement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Text of requirement that a data collection should be cited properly in articles or other publications that are based on analysis of the data.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <citReq>Publications based on ICPSR data collections should acknowledge those sources by  means of bibliographic citations. To ensure that such source attributions are captured for social science bibliographic utilities, citations must appear in footnotes or in the reference section of publications.</citReq>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="citationType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="titlStmt"/>
+               <xs:element ref="rspStmt" minOccurs="0"/>
+               <xs:element ref="prodStmt" minOccurs="0"/>
+               <xs:element ref="distStmt" minOccurs="0"/>
+               <xs:element ref="serStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="verStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="biblCit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="holdings" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:group ref="dc:elementsAndRefinementsGroup"/>
+            </xs:sequence>
+            <xs:attribute name="MARCURI" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="citation" type="citationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Bibliographic Citation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>This element encodes the bibliographic information for the work at the level specified: (1) Document Description, Citation (of Marked-up Document), (2) Document Description, Citation (of Marked-up Document Source), (3) Study Description, Citation (of Study), (4) Study Description, Other Material, and (5) Other Material for the study itself. Bibliographic information includes title information, statement of responsibility, production and distribution information, series and version information, text of a preferred bibliographic citation, and notes (if any). </xhtml:p>
+                     <xhtml:p>A MARCURI attribute is provided to link to the MARC record for the citation.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
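+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only, not taken from the DDI documentation: a minimal citation with title and producer statements; the title, producer, and MARCURI values are hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <citation MARCURI="http://www.example.org/marc/study0001.xml">
+                           <titlStmt>
+                              <titl>Example Survey of Household Income, 1999</titl>
+                           </titlStmt>
+                           <prodStmt>
+                              <producer>Example Data Producer</producer>
+                           </prodStmt>
+                        </citation>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>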
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="cleanOpsType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="agency" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="cleanOps" type="cleanOpsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Cleaning Operations</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Methods used to "clean" the data collection, e.g., consistency checking, wild code checking, etc. The "agency" attribute permits specification of the agency doing the data cleaning.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <cleanOps>Checks for undocumented codes were performed, and data were subsequently revised in consultation with the principal investigator.</cleanOps>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="codInstr" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Coder Instructions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Any special instructions to those who converted information from one form to another for a particular variable. This might include the reordering of numeric information into another form or the conversion of textual information into numeric information.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <codInstr>Use the standard classification tables to present responses to the question: What is your occupation? into numeric codes.</codInstr>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="codeBookType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="docDscr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="stdyDscr" maxOccurs="unbounded"/>
+               <xs:element ref="fileDscr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataDscr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="otherMat" minOccurs="0" maxOccurs="unbounded">
+                  <xs:annotation>
+                     <xs:documentation>This should be used for materials that are primarily descriptions of the content and use of the study, such as appendices, sampling information, weighting details, methodological and technical details, publications based upon the study content, related studies or collection of studies, etc. This section is intended to include or to link to materials used in the production of the study or useful in the analysis of the study.</xs:documentation>
+                  </xs:annotation>
+               </xs:element>
+            </xs:sequence>
+            <xs:attribute name="version" type="xs:string" fixed="2.5"/>
+            <xs:attribute name="codeBookAgency" type="xs:NCName" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="codeBook" type="codeBookType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Codebook</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>Every element in the DDI DTD/Schema has the following attributes:</xhtml:p>
+                     <xhtml:p>ID - This uniquely identifies each element.</xhtml:p>
+                     <xhtml:p>xml-lang - Use of this attribute is deprecated, and it will no longer be supported in the next major version of the DDI specification. For newly created XML documents, please use xml:lang.</xhtml:p>
+                     <xhtml:p>xml:lang - This attribute specifies the language used in the contents and attribute values of any element in the XML document. Use of ISO (<xhtml:a href="http://www.iso.org/">www.iso.org</xhtml:a>) language codes is recommended.</xhtml:p>
+                     <xhtml:p>source - This attribute identifies the source that provided information in the element. If the documentation contains two differing sets of information on Sampling Procedure -- one provided by the data producer and one by the archive where the data is deposited -- this information can be distinguished through the use of the source attribute.</xhtml:p>
+                     <xhtml:p>Note also that the DDI contains a linking mechanism permitting arbitrary links between internal elements (See Link) and from internal elements to external sources (See ExtLink).</xhtml:p>
+                     <xhtml:p>The top-level element, codeBook, also includes a version attribute to specify the version number of the DDI specification.</xhtml:p>
+                     <xhtml:p>codeBookAgency - This attribute holds the agency name of the creator or maintainer of the codeBook instance as a whole, and is designed to support forward compatibility with DDI-Lifecycle. The recommended value is the agency name as filed with the DDI Agency ID Registry, with optional additional sub-agency extensions.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
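+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only, not taken from the DDI documentation: a minimal codeBook skeleton with a single study description; the ID and study title are hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <codeBook ID="CB001" version="2.5">
+                           <stdyDscr>
+                              <citation>
+                                 <titlStmt>
+                                    <titl>Example Survey of Household Income, 1999</titl>
+                                 </titlStmt>
+                              </citation>
+                           </stdyDscr>
+                        </codeBook>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>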
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="cohortType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="range" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="catRef" type="xs:IDREF" use="optional"/>
+            <xs:attribute name="value" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="cohort" type="cohortType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Cohort</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The element cohort is used when the nCube contains a limited number of categories from a particular variable, as opposed to the full range of categories. The attribute "catRef" is an IDREF to the actual category being used. The attribute "value" indicates the actual value attached to the category that is being used.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dmns>
+                           <cohort catRef="CV24_1" value="1"/>
+                        </dmns>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="collDateType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="event" default="single">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="start"/>
+                     <xs:enumeration value="end"/>
+                     <xs:enumeration value="single"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="cycle" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="collDate" type="collDateType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Date of Collection</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Contains the date(s) when the data were collected. Use the event attribute to specify "start", "end", or "single" for each date entered. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the "date" attribute. The "cycle" attribute permits specification of the relevant cycle, wave, or round of data. Maps to Dublin Core Coverage element. Inclusion of this element in the codebook is recommended.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collDate event="single" date="1998-11-10">10 November 1998</collDate>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="collMode" type="conceptualTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Mode of Data Collection</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The method used to collect the data; instrumentation characteristics. XHTML formatting may be used in the txt element for forward-compatibility with DDI 3.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collMode>telephone interviews</collMode>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collMode>face-to-face interviews</collMode>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collMode>mail questionnaires</collMode>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collMode>computer-aided telephone interviews (CATI)</collMode>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="collSitu" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Characteristics of Data Collection Situation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Description of noteworthy aspects of the data collection situation. Includes information on factors such as cooperativeness of respondents, duration of interviews, number of call-backs, etc.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collSitu>There were 1,194 respondents who answered questions in face-to-face interviews lasting approximately 75 minutes each.</collSitu>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="collSize" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Extent of Collection</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Summarizes the number of physical files that exist in a collection, recording the number of files that contain data and noting whether the collection contains machine-readable documentation and/or other supplementary files and information such as data dictionaries, data definition statements, or data collection instruments. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <collSize>1 data file + machine-readable documentation (PDF) + SAS data definition statements</collSize>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="colspecType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="colnum" type="xs:string" use="optional"/>
+            <xs:attribute name="colname" type="xs:NMTOKEN"/>
+            <xs:attribute name="colwidth" type="xs:string"/>
+            <xs:attribute name="colsep" type="xs:string"/>
+            <xs:attribute name="rowsep" type="xs:string"/>
+            <xs:attribute name="align">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="left"/>
+                     <xs:enumeration value="right"/>
+                     <xs:enumeration value="center"/>
+                     <xs:enumeration value="justify"/>
+                     <xs:enumeration value="char"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="char" type="xs:string"/>
+            <xs:attribute name="charoff" type="xs:NMTOKEN"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="colspec" type="colspecType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Column Specification</xhtml:h1>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="complete" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Completeness of Study Stored</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This item indicates the relationship of the data collected to the amount of data coded and stored in the data collection. Information as to why certain items of collected information were not included in the data file stored by the archive should be provided. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <complete>Because of embargo provisions, data values for some variables have been masked. Users should consult the data definition statements to see which variables are under embargo. A new version of the collection will be released by ICPSR after embargoes are lifted.</complete>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="conceptType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="vocab" type="xs:string"/>
+            <xs:attribute name="vocabURI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="concept" type="conceptType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Concept</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The general subject to which the parent element may be seen as pertaining. This element serves the same purpose as the keywords and topic classification elements, but at the data description level. The "vocab" attribute is provided to indicate the controlled vocabulary, if any, used in the element, e.g., LCSH (Library of Congress Subject Headings), MeSH (Medical Subject Headings), etc. The "vocabURI" attribute specifies the location for the full controlled vocabulary.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp>
+                           <concept>Income</concept>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp>
+                           <concept vocab="LCSH" vocabURI="http://lcweb.loc.gov/catdir/cpso/lcco/lcco.html" source="archive">more experience</concept>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <concept>Income</concept>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <concept vocab="LCSH" vocabURI="http://lcweb.loc.gov/catdir/cpso/lcco/lcco.html" source="archive">SF: 311-312 draft horses</concept>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="conditions" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Conditions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Indicates any additional information that will assist the user in understanding the access and use conditions of the data collection.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <conditions>The data are available without restriction. Potential users of these datasets are advised, however, to contact the original principal investigator Dr. J. Smith (Institute for Social Research, The University of Michigan, Box 1248, Ann Arbor, MI 48106), about their intended uses of the data. Dr. Smith would also appreciate receiving copies of reports based on the datasets.</conditions>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="confDecType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="required" default="yes">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="yes"/>
+                     <xs:enumeration value="no"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="formNo" type="xs:string"/>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="confDec" type="confDecType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Confidentiality Declaration</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element is used to determine if signing of a confidentiality declaration is needed to access a resource. The "required" attribute is used to aid machine processing of this element, and the default specification is "yes". The "formNo" attribute indicates the number or ID of the form that the user must fill out. The "URI" attribute may be used to provide a URN or URL for online access to a confidentiality declaration form.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <confDec formNo="1">To download this dataset, the user must sign a declaration of confidentiality.</confDec>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <confDec URI="http://www.icpsr.umich.edu/HMCA/CTSform/contents.html"> To obtain this dataset, the user must complete a Restricted Data Use Agreement.</confDec>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="contactType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="affiliation" type="xs:string"/>
+            <xs:attribute name="URI" type="xs:string"/>
+            <xs:attribute name="email" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="contact" type="contactType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Contact Persons</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Names and addresses of individuals responsible for the work. Individuals listed as contact persons will be used as resource persons regarding problems or questions raised by the user community. The URI attribute should be used to indicate a URN or URL for the homepage of  the contact individual. The email attribute is used to indicate an email address for the contact individual. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <contact affiliation="University of Wisconsin" email="jsmith@...">Jane Smith</contact>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="ConOpsType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="agency" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="ConOps" type="ConOpsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Control Operations</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Methods to facilitate data control performed by the primary investigator or by the data archive. Specify any special programs used for such operations. The "agency" attribute may be used to refer to the agency that performed the control operation.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <ConOps source="ICPSR">Ten percent of data entry forms were reentered to check for accuracy.</ConOps>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="controlledVocabUsedType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="codeListID" minOccurs="0"/>
+               <xs:element ref="codeListName" minOccurs="0"/>
+               <xs:element ref="codeListAgencyName" minOccurs="0"/>
+               <xs:element ref="codeListVersionID" minOccurs="0"/>
+               <xs:element ref="codeListURN" minOccurs="0"/>
+               <xs:element name="codeListSchemeURN" minOccurs="0"/>
+               <xs:element ref="usage" minOccurs="1" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="controlledVocabUsed" type="controlledVocabUsedType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Controlled Vocabulary Used</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides a code value, as well as a reference to the code list from which the value is taken. Note that the CodeValue can be restricted to reference an enumeration.</xhtml:div>
+               </xhtml:div>
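+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative composite assembled from the child-element examples below (codeListID, codeListName, codeListAgencyName, codeListVersionID, codeListURN, usage); not taken from the DDI documentation. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <controlledVocabUsed>
+                           <codeListID>TimeMethod</codeListID>
+                           <codeListName>Time Method</codeListName>
+                           <codeListAgencyName>DDI Alliance</codeListAgencyName>
+                           <codeListVersionID>1.1</codeListVersionID>
+                           <codeListURN>urn:ddi-cv:TimeMethod:1.1</codeListURN>
+                           <usage>
+                              <selector>/codeBook/stdyDscr/method/dataColl/timeMeth</selector>
+                           </usage>
+                        </controlledVocabUsed>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>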
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="codeListID" type="stringType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Code List ID</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies the code list that the value is taken from.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <codeListID>TimeMethod</codeListID>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="codeListName" type="stringType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Code List Name</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies the code list that the value is taken from with a human-readable name.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <codeListName>Time Method</codeListName>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="codeListAgencyName" type="stringType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Code List Agency Name</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Agency maintaining the code list.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <codeListAgencyName>DDI Alliance</codeListAgencyName>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="codeListVersionID" type="stringType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Code List Version ID</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Version of the code list. (Default value is 1.0)</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <codeListVersionID>1.1</codeListVersionID>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="codeListURN" type="stringType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Code List URN</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies the code list that the value is taken from with a URN.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <codeListURN>urn:ddi-cv:TimeMethod:1.1</codeListURN>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="codeListSchemeURN" type="stringType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Code List Scheme URN</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies the code list scheme using a URN.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[<codeListSchemeURN>http://www.ddialliance.org/Specification/DDI-CV/TimeMethod_1.1_Genericode1.0_DDI-CVProfile1.0.xml</codeListSchemeURN>]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="usage" type="usageType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Usage</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Defines where in the instance the controlled vocabulary which is identified is utilized. A controlled vocabulary may occur either in the content of an element or in an attribute on an element. The usage can either point to a collection of elements using an XPath via the selector element or point to a more specific collection of elements via their identifier using the specificElements element. If the controlled vocabulary occurs in an attribute within the element, the attribute element identifies the specific attribute. When specific elements are specified, an authorized code value may also be provided. If the current value of the element or attribute identified is not in the controlled vocabulary or is not identical to a code value, the authorized code value identifies a valid code value corresponding to the meaning of the content in the element or attribute.</xhtml:div>
+               </xhtml:div>
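+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketches only, not taken from the DDI documentation; both variants reuse the selector and specificElements examples below. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <usage>
+                           <selector>/codeBook/stdyDscr/method/dataColl/timeMeth</selector>
+                        </usage>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <usage>
+                           <specificElements refs="ICPSR4328timeMeth" authorizedCodeValue="CrossSection"/>
+                        </usage>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>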
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="usageType">
+      <xs:sequence>
+         <xs:choice>
+            <xs:element ref="selector"/>
+            <xs:element ref="specificElements"/>
+         </xs:choice>
+         <xs:element ref="attribute" minOccurs="0"/>
+      </xs:sequence>
+   </xs:complexType>
+   
+   <xs:simpleType name="selectorType">
+      <xs:restriction base="xs:string">
+         <xs:pattern value="((//|/)(([\i-[:]][\c-[:]]*:)?[\i-[:]][\c-[:]]*|\*|[\i-[:]][\c-[:]]*:\*))+"/>
+      </xs:restriction>
+   </xs:simpleType>
+   
+   <xs:element name="selector" type="selectorType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Selector</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies a collection of elements in which a controlled vocabulary is used. This is a simplified XPath which must correspond to the actual instance in which it occurs, which is to say that the fully qualified element names here must correspond to those in the instance. This XPath can only identify elements and does not allow for any predicates. The XPath must either be rooted or deep.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[<selector>/codeBook/stdyDscr/method/dataColl/timeMeth</selector>]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="specificElementsType">
+      <xs:attribute name="refs" type="xs:IDREFS" use="required"/>
+      <xs:attribute name="authorizedCodeValue" type="xs:NMTOKEN" use="optional"/>
+   </xs:complexType>
+   
+   <xs:element name="specificElements" type="specificElementsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Specific Elements</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies a collection of specific elements via their identifiers in the refs attribute, which accepts a tokenized list of identifier values that must correspond to identifiers existing in the instance. The authorizedCodeValue attribute can be used to provide a valid code value corresponding to the meaning of the content in the element or attribute when the identified element or attribute does not use an actual valid value from the controlled vocabulary.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[<specificElements refs="ICPSR4328timeMeth" authorizedCodeValue="CrossSection"/>]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:simpleType name="attributeType">
+      <xs:restriction base="xs:string">
+         <xs:pattern value="@(([\i-[:]][\c-[:]]*:)?[\i-[:]][\c-[:]]*|\*|[\i-[:]][\c-[:]]*:\*)"/>
+      </xs:restriction>
+   </xs:simpleType>
+   
+   <xs:element name="attribute" type="attributeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Attribute</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies an attribute within the element(s) identified by the selector or specificElements in which the controlled vocabulary is used. The fully qualified name used here must correspond to that in the instance, which is to say that if the attribute is namespace qualified, the prefix used here must match that which is defined in the instance.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[<attribute>type</attribute>]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="copyright" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Copyright</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Copyright statement for the work at the appropriate level. Copyright for data collection (codeBook/stdyDscr/citation/prodStmt/copyright) maps to Dublin Core Rights. Inclusion of this element is recommended.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <copyright>Copyright(c) ICPSR, 2000</copyright>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="CubeCoordType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="coordNo" type="xs:string"/>
+            <xs:attribute name="coordVal" type="xs:string"/>
+            <xs:attribute name="coordValRef" type="xs:IDREF"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="CubeCoord" type="CubeCoordType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Cube Coordinate</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This is an empty element containing only the attributes listed below. It is used to identify the coordinates of the data item within a logical nCube describing aggregate data. CubeCoord is repeated for each dimension of the nCube giving the coordinate number ("coordNo") and coordinate value ("coordVal"). Coordinate value reference ("coordValRef") is an ID reference to the variable that carries the coordinate value. The attributes provide a complete coordinate location of a cell within the nCube.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <CubeCoord coordNo="1" coordVal="3"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <CubeCoord coordNo="2" coordVal="7"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <CubeCoord coordNo="3" coordVal="2"/>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="dataAccsType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="setAvail" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="useStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="dataAccs" type="dataAccsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Access</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This section describes access conditions and terms of use for the data collection. In cases where access conditions differ across individual files or variables, multiple access conditions can be specified. The access conditions applying to a study, file, variable group, or variable can be indicated by an IDREF attribute on the study, file, variable group, or variable elements called "access".</xhtml:div>
+               </xhtml:div>
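+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only, not taken from the DDI documentation, assuming a useStmt with a conditions child; the conditions text reuses the conditions example above. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataAccs>
+                           <useStmt>
+                              <conditions>The data are available without restriction.</conditions>
+                           </useStmt>
+                        </dataAccs>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>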
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="dataApprType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="dataAppr" type="dataApprType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Other Forms of Data Appraisal</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Other issues pertaining to data appraisal. Describe here issues such as response variance, nonresponse rate and testing for bias, interviewer and response bias, confidence levels, question bias, etc. The "type" attribute allows for optional typing of data appraisal processes and for the use of a controlled vocabulary.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataAppr>These data files were obtained from the United States House of Representatives, who received them from the Census Bureau accompanied by the following caveats: "The numbers contained herein are not official 1990 decennial Census counts. The numbers represent estimates of the population based on a statistical adjustment method applied to the official 1990 Census figures using a sample survey intended to measure overcount or undercount in the Census results. On July 15, 1991, the Secretary of Commerce decided not to adjust the official 1990 decennial Census counts (see 56 Fed. Reg. 33582, July 22, 1991). In reaching his decision, the Secretary determined that there was not sufficient evidence that the adjustment method accurately distributed the population across and within states. The numbers contained in these tapes, which had to be produced prior to the Secretary's decision, are now known to be biased. Moreover, the tapes do not satisfy standards for the publication of Federal statistics, as established in Statistical Policy Directive No. 2, 1978, Office of Federal Statistical Policy and Standards. Accordingly, the Department of Commerce deems that these numbers cannot be used for any purpose that legally requires use of data from the decennial Census and assumes no responsibility for the accuracy of the data for any purpose whatsoever. The Department will provide no assistance in interpretation or use of these numbers."</dataAppr>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="dataChck" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Extent of Processing Checks</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Indicate here, at the file level, the types of checks and operations performed on the data file. A controlled vocabulary may be developed for this element in the future. The following examples are based on ICPSR's Extent of Processing scheme:</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>The archive produced a codebook for this collection.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Consistency checks were performed by Data Producer/ Principal  Investigator.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Consistency checks performed by the archive.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>The archive generated SAS and/or SPSS data definition  statements for this collection.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Frequencies were provided by Data Producer/Principal Investigator.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Frequencies provided by the archive.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Missing data codes were standardized by Data  Producer/ Principal Investigator.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Missing data codes were standardized by the archive.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>The archive performed recodes and/or calculated derived variables. </dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Data were reformatted by the archive.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Checks for undocumented codes were performed by  Data Producer/Principal Investigator.</dataChck>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataChck>Checks for undocumented codes were performed by the archive.</dataChck>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="dataCollType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="timeMeth" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataCollector" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="collectorTraining" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="frequenc" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="sampProc" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="sampleFrame" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="targetSampleSize" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="deviat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="collMode" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="resInstru" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="instrumentDevelopment" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="sources" minOccurs="0"/>
+               <xs:element ref="collSitu" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="actMin" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="ConOps" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="weight" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="cleanOps" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dataColl" type="dataCollType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Collection Methodology</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information about the methodology employed in a data collection.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="sampleFrameType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="sampleFrameName" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="validPeriod" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="custodian" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="useStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="frameUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="referencePeriod" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="updateProcedure" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="sampleFrame" type="sampleFrameType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Sample Frame</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Sample frame describes the sampling frame used for identifying the population from which the sample was taken. For example, a telephone book may be a sample frame for a phone survey. In addition to the name, label and text describing the sample frame, this structure lists who maintains the sample frame, the period for which it is valid, a use statement, the universe covered, the type of unit contained in the frame as well as the number of units available, the reference period of the frame and procedures used to update the frame. Use multiple use statements to provide different uses under different conditions. Repeat elements within the use statement to support multiple languages.</xhtml:div>
+               </xhtml:div>
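+               <!-- Editorial note: illustrative composite example only (not part of the original DDI documentation); the child elements are taken from the sampleFrameName, validPeriod, custodian, frameUnit, and referencePeriod samples given below. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampleFrame>
+                           <sampleFrameName>City of St. Paul Directory</sampleFrameName>
+                           <validPeriod event="start">2009-07-01</validPeriod>
+                           <validPeriod event="end">2011-06-30</validPeriod>
+                           <custodian>DEX Publications</custodian>
+                           <frameUnit isPrimary="true"><unitType numberOfUnits="150000">Primary listed owners of published phone numbers in the City of St. Paul</unitType></frameUnit>
+                           <referencePeriod event="single">2009-06-01</referencePeriod>
+                        </sampleFrame>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>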
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="sampleFrameName" type="stringType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+
+					<xhtml:h1 class="element_title">Sample Frame Name</xhtml:h1>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Name of the sample frame.</xhtml:div>
+					</xhtml:div>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<sampleFrameName>City of St. Paul Directory</sampleFrameName>]]></xhtml:samp>
+</xhtml:div>
+</xhtml:div>
+
+</xhtml:div>
+</xs:documentation>
+</xs:annotation>
+</xs:element>
+   
+   <xs:complexType name="eventDateType">
+      <xs:simpleContent>
+         <xs:extension base="dateType">
+            <xs:attribute name="event">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="start"/>
+                     <xs:enumeration value="end"/>
+                     <xs:enumeration value="single"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <xs:element name="validPeriod" type="eventDateType">
+<xs:annotation>
+<xs:documentation>
+<xhtml:div>
+
+<xhtml:h1 class="element_title">Valid Period</xhtml:h1>
+
+<xhtml:div>
+<xhtml:h2 class="section_header">Description</xhtml:h2>
+<xhtml:div class="description">Defines a time period for the validity of the sampling frame. Enter dates in YYYY-MM-DD format.</xhtml:div>
+</xhtml:div>
+
+<xhtml:div>
+<xhtml:h2 class="section_header">Example</xhtml:h2>
+<xhtml:div class="example">
+<xhtml:samp class="xml_sample"><![CDATA[
+<validPeriod event=start">2009-07-01</validPeriod>
+<validPeriod event="end">2011-06-30</validPeriod>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+
+   <xs:element name="referencePeriod" type="eventDateType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+
+					<xhtml:h1 class="element_title">Reference Period</xhtml:h1>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Indicates the period of time in which the sampling frame was actually used for the study in question. Use ISO 8601 date/time formats to enter the relevant date(s).</xhtml:div>
+					</xhtml:div>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<referencePeriod event="single">2009-06-01</referencePeriod>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+   
+   <xs:complexType name="frameUnitType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="unitType"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="isPrimary" type="xs:boolean" default="true"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="frameUnit" type="frameUnitType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+
+					<xhtml:h1 class="element_title">Frame Unit</xhtml:h1>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Provides information about the sampling frame unit. The attribute "isPrimary" is boolean, indicating whether the unit is primary or not.</xhtml:div>
+					</xhtml:div>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<frameUnit isPrimary="true"><unitType numberOfUnits="150000">Primary listed owners of published phone numbers in the City of St. Paul</unitType></frameUnit>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+
+   <xs:complexType name="unitTypeType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="numberOfUnits" type="xs:integer" use="optional"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+
+   <xs:element name="unitType" type="unitTypeType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+
+					<xhtml:h1 class="element_title">Unit Type</xhtml:h1>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Describes the type of sampling frame unit. The attribute "numberOfUnits" provides the number of units in the sampling frame.</xhtml:div>
+					</xhtml:div>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<unitType numberOfUnits="150000">Primary listed owners of published phone numbers in the City of St. Paul</unitType>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+
+   <xs:complexType name="targetSampleSizeType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="sampleSize" minOccurs="0"/>
+               <xs:element ref="sampleSizeFormula" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="targetSampleSize" type="targetSampleSizeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Target Sample Size</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides both the target size of the sample (this is the number in the original sample, not the number of respondents) as well as the formula used for determining the sample size.</xhtml:div>
+               </xhtml:div>
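+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the values are taken from the sampleSize and sampleSizeFormula samples given below. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <targetSampleSize>
+                           <sampleSize>385</sampleSize>
+                           <sampleSizeFormula>n0 = Z^2pq/e^2 = (1.96)^2(.5)(.5)/(.05)^2 = 385 individuals</sampleSizeFormula>
+                        </targetSampleSize>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>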
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="sampleSize" type="integerType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+
+					<xhtml:h1 class="element_title">Sample Size</xhtml:h1>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This element provides the targeted sample size in integer format.</xhtml:div>
+					</xhtml:div>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<sampleSize>385</sampleSize>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+   
+   <xs:element name="sampleSizeFormula" type="stringType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+
+					<xhtml:h1 class="element_title">Sample Size Formula</xhtml:h1>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">This element includes the formula that was used to determine the sample size.</xhtml:div>
+					</xhtml:div>
+
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<sampleSizeFormula>n0 = Z^2pq/e^2 = (1.96)^2(.5)(.5)/(.05)^2 = 385 individuals</sampleSizeFormula>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+   <xs:complexType name="instrumentDevelopmentType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="instrumentDevelopment" type="instrumentDevelopmentType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Instrument Development</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Describe any development work on the data collection instrument. Type attribute allows for the optional use of a defined development type with or without use of a controlled vocabulary.</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<instrumentDevelopment type="pretesting">The questionnaire was pre-tested with split-panel tests, as well as an analysis of non-response rates for individual items, and response distributions.</instrumentDevelopment>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="updateProcedure" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Update Procedure</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Description of how and with what frequency the sample frame is updated.</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<updateProcedure>Changes are collected as they occur through registration and loss of phone number from the specified geographic area. Data are compiled for the date June 1st of odd numbered years, and published on July 1st for the following two-year period.</updateProcedure>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="custodianType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="affiliation" type="xs:string" use="optional"/>
+            <xs:attribute name="abbr" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+
+   <xs:element name="custodian" type="custodianType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Custodian</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Custodian identifies the agency or individual who is responsible for creating or maintaining the sample frame. Attribute affiliation provides the affiliation of the custodian with an agency or organization. Attribute abbr provides an abbreviation for the custodian.</xhtml:div>
+					</xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<custodian>DEX Publications</custodian>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="collectorTrainingType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="collectorTraining" type="collectorTrainingType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Collector Training</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Describes the training provided to data collectors including interviewer training, process testing, compliance with standards, etc. This is repeatable for language and to capture different aspects of the training process. The type attribute allows specification of the type of training being described.</xhtml:div>
+					</xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<collectorTraining type="interviewer training">Describe research project, describe population and sample, suggest methods and language for approaching subjects, explain questions and key terms of survey instrument.</collectorTraining>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="dataCollectorType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+            <xs:attribute name="affiliation" type="xs:string"/>
+            <xs:attribute name="role" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dataCollector" type="dataCollectorType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Collector</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The entity (individual, agency, or institution) responsible for administering the questionnaire or interview or compiling the data. This refers to the entity collecting the data, not to the entity producing the documentation. Attribute "abbr" may be used to list common abbreviations given to agencies, etc. Attribute "affiliation" may be used to record the affiliation of the data collector. The role attribute specifies the role of the person in the data collection process.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataCollector abbr="SRC" affiliation="University of Michigan" role="questionnaire administration">Survey Research Center</dataCollector>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="dataDscrType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="varGrp" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="nCubeGrp" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="var" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="nCube" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dataDscr" type="dataDscrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Variable Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Description of variables.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="dataItemType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="CubeCoord" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="physLoc" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="varRef" type="xs:IDREF"/>
+            <xs:attribute name="nCubeRef" type="xs:IDREF"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dataItem" type="dataItemType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Item</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Identifies a physical storage location for an individual data entry, serving as a link between the physical location and the logical content description of each data item. The attribute "varRef" is an IDREF that points to a discrete variable description. If the data item is located within an nCube (aggregate data), use the attribute "nCubeRef" (IDREF) to point to the appropriate nCube and the element CubeCoord to identify the coordinates of the data item within the nCube.</xhtml:div>
+               </xhtml:div>
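+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the varRef value "V102" is a hypothetical variable ID, and the optional physLoc and CubeCoord children are omitted. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataItem varRef="V102"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>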
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="dataKindType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="conceptualTextType">
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dataKind" type="dataKindType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Kind of Data</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The type of data included in the file: survey data, census/enumeration data, aggregate data, clinical data, event/transaction data, program source code, machine-readable text, administrative records data, experimental data, psychological test, textual data, coded textual, coded documents, time budget diaries, observation data/ratings, process-produced data, etc. This element maps to Dublin Core Type element. The type attribute can be used for forward-compatibility with DDI 3, by providing a type for use of controlled vocabulary, as this is descriptive in DDI 2 and CodeValue in DDI 3.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataKind type="numeric">survey data</dataKind>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="dataMsng" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Missing Data</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element can be used to give general information about missing data, e.g., that missing data have been standardized across the collection, missing data are present because of merging, etc.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataMsng>Missing data are represented by blanks.</dataMsng>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataMsng>The codes "-1" and "-2" are used to represent missing data.</dataMsng>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="dataSrc" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Sources</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Used to list the book(s), article(s), serial(s), and/or machine-readable data file(s)--if any--that served as the source(s) of the data collection. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataSrc> "Voting Scores." CONGRESSIONAL QUARTERLY ALMANAC 33 (1977), 487-498.</dataSrc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataSrc>United States Internal Revenue Service Quarterly Payroll File</dataSrc>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="defntn" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Definition</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Rationale for why the group was constituted in this way.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp>
+                           <defntn>The following eight variables were only asked in Ghana.</defntn>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp>
+                           <defntn>The following four nCubes form a single presentation table.</defntn>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="depDate" type="simpleTextAndDateType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Date of Deposit</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The date that the work was deposited with the archive that originally received it. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the "date" attribute.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <depDate date="1999-01-25">January 25, 1999</depDate>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="deposReq" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Deposit Requirement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information regarding user responsibility for informing archives of their use of data through providing citations to the published work or providing copies of the manuscripts. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <deposReq> To provide funding agencies with essential information about use of archival resources and to facilitate the exchange of information about ICPSR participants' research activities, users of ICPSR data are requested to send to ICPSR bibliographic citations for, or copies of, each completed manuscript or thesis abstract. Please indicate in a cover letter which data were used.</deposReq>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="depositrType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+            <xs:attribute name="affiliation" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="depositr" type="depositrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Depositor</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The name of the person (or institution) who provided this work to the archive storing it. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <depositr abbr="BJS" affiliation="U.S. Department of Justice">Bureau of Justice Statistics</depositr>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="derivationType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="drvdesc" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="drvcmd" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="var" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="derivation" type="derivationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Derivation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Used only in the case of a derived variable, this element provides both a description of how the derivation was performed and the command used to generate the derived variable, as well as a specification of the other variables in the study used to generate the derivation. The "var" attribute provides the ID values of the other variables in the study used to generate this derived variable.</xhtml:div>
+               </xhtml:div>
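+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the var IDREFS values and the drvdesc text are hypothetical, and the command is taken from the drvcmd sample given below. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <derivation var="V1 V2 V3">
+                           <drvdesc>Derived by recoding variables V1 through V3.</drvdesc>
+                           <drvcmd syntax="SPSS">RECODE V1 TO V3 (0=1) (1=0) (2=-1) INTO DEFENSE WELFAREHEALTH.</drvcmd>
+                        </derivation>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>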
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="deviat" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Major Deviations from the Sample Design</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information indicating correspondence as well as discrepancies between the sampled units (obtained) and available statistics for the population (age, sex-ratio, marital status, etc.) as a whole. XHTML formatting may be used in this element for forward-compatibility with DDI 3. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <deviat>The suitability of Ohio as a research site reflected its similarity to the United States as a whole. The evidence extended by Tuchfarber (1988) shows that Ohio is representative of the United States in several ways: percent urban and rural, percent of the population that is African American, median age, per capita income, percent living below the poverty level, and unemployment rate. Although results generated from an Ohio sample are not empirically generalizable to the United States, they may be suggestive of what might be expected nationally.</deviat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="dataFingerprintType">
+      <xs:sequence>
+	     <xs:element name="digitalFingerprintValue" type="xs:string"/>
+		 <xs:element name="algorithmSpecification" type="xs:string" minOccurs="0"/>
+		 <xs:element name="algorithmVersion" type="xs:string" minOccurs="0"/>
+	  </xs:sequence>
+	  <xs:attribute name="type" use="required">
+         <xs:simpleType>
+            <xs:restriction base="xs:NMTOKEN">
+               <xs:enumeration value="data"/>
+               <xs:enumeration value="dataFile"/>
+            </xs:restriction>
+         </xs:simpleType>
+      </xs:attribute>
+   </xs:complexType>
+
+   <xs:element name="dataFingerprint" type="dataFingerprintType">
+      <xs:annotation>
+	     <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Fingerprint</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Allows for assigning a hash value (digital fingerprint) to the data or data file. Set the attribute flag to "data" when the hash value provides a digital fingerprint to the data contained in the file regardless of the storage format (ASCII, SAS, binary, etc.). One approach to compute a data fingerprint is the Universal Numerical Fingerprint (UNF). Set the attribute flag to "dataFile" if the digital fingerprint is only for the data file in its current storage format. Provide the digital fingerprint in digitalFingerprintValue and identify the algorithm specification used (add version as a separate entry if it is not part of the specification entry).</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<dataFingerprint type="data"><digitalFingerprintValue>UNF:3:DaYlT6QSX9r0D50ye+tXpA== </digitalFingerprintValue>
+<algorithmSpecification>UNF v5.0 Calculation Producture [http://thedata.org/book/unf-version-5-0]</algorithmSpecification><algorithmVersion>UNF V5</algorithmVersion></dataFingerprint>
+
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+            </xhtml:div>
+		 </xs:documentation>
+	  </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="dimensnsType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="caseQnty" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="varQnty" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="logRecL" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="recPrCas" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="recNumTot" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dimensns" type="dimensnsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">File Dimensions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Dimensions of the overall file.</xhtml:div>
+               </xhtml:div>
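+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the counts are hypothetical and assume the usual DDI usage of caseQnty (number of cases), varQnty (number of variables), and logRecL (logical record length). -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dimensns>
+                           <caseQnty>1011</caseQnty>
+                           <varQnty>27</varQnty>
+                           <logRecL>80</logRecL>
+                        </dimensns>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>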
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="disclaimer" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Disclaimer</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information regarding responsibility for uses of the data collection. This element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <disclaimer>The original collector of the data, ICPSR, and the relevant funding agency bear no responsibility for uses of this collection or for interpretations or inferences based upon such uses.</disclaimer>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="distDate" type="simpleTextAndDateType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Date of Distribution</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Date that the work was made available for distribution/presentation. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the "date" attribute.  If using a text entry in the element content, the element may be repeated to support multiple language expressions.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <distDate date="1999-01-25">January 25, 1999</distDate>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="distStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="distrbtr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="contact" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="depositr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="depDate" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="distDate" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="distStmt" type="distStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Distributor Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Distribution statement for the work at the appropriate level: marked-up document; marked-up document source; study; study description, other material; other material for study.</xhtml:div>
+               </xhtml:div>
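+               <!-- Editorial note: illustrative composite example only (not part of the original DDI documentation); the child elements are taken from the distrbtr, depositr, depDate, and distDate samples in this schema. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <distStmt>
+                           <distrbtr abbr="ICPSR" affiliation="Institute for Social Research" URI="http://www.icpsr.umich.edu">Ann Arbor, MI: Inter-university Consortium for Political and Social Research</distrbtr>
+                           <depositr abbr="BJS" affiliation="U.S. Department of Justice">Bureau of Justice Statistics</depositr>
+                           <depDate date="1999-01-25">January 25, 1999</depDate>
+                           <distDate date="1999-01-25">January 25, 1999</distDate>
+                        </distStmt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>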
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="distrbtrType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+            <xs:attribute name="affiliation" type="xs:string"/>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="distrbtr" type="distrbtrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Distributor</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The organization designated by the author or producer to generate copies of the particular work including any necessary editions or revisions. Names and addresses may be specified and other archives may be co-distributors. A URI attribute is included to provide an URN or URL to the ordering service or download facility on a Web site. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <distrbtr abbr="ICPSR" affiliation="Institute for Social Research" URI="http://www.icpsr.umich.edu">Ann Arbor, MI: Inter-university Consortium for Political and Social Research</distrbtr>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="dmnsType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="cohort" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="rank" type="xs:string"/>
+            <xs:attribute name="varRef" type="xs:IDREF"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="dmns" type="dmnsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Dimension</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element defines a variable as a dimension of the nCube, and should be repeated to describe each of the cube's dimensions. The attribute "rank" is used to define the coordinate order (rank="1", rank="2", etc.) The attribute "varRef" is an IDREF that points to the variable that makes up this dimension of the nCube.</xhtml:div>
+               </xhtml:div>
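+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the varRef values are hypothetical variable IDs. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dmns rank="1" varRef="V1"/>
+                        <dmns rank="2" varRef="V2"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>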
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="docDscrType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="citation" minOccurs="0"/>
+               <xs:element ref="guide" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="docStatus" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="docSrc" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="controlledVocabUsed" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="docDscr" type="docDscrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Document Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>The Document Description consists of bibliographic information describing the DDI-compliant document itself as a whole. This Document Description can be considered the wrapper or header whose elements uniquely describe the full contents of the compliant DDI file. Since the Document Description section is used to identify the DDI-compliant file within an electronic resource discovery environment, this section should be as complete as possible.</xhtml:p>
+                     <xhtml:p>The author in the Document Description should be the individual(s) or organization(s) directly responsible for the intellectual content of the DDI version, as distinct from the person(s) or organization(s) responsible for the intellectual content of the earlier paper or electronic edition from which the DDI edition may have been derived. The producer in the Document Description should be the agency or person that prepared the marked-up document.</xhtml:p>
+                     <xhtml:p>Note that the Document Description section contains a Documentation Source subsection consisting of information about the source of the DDI-compliant file -- that is, the hardcopy or electronic codebook that served as the source for the marked-up codebook. These sections allow the creator of the DDI file to produce version, responsibility, and other descriptions relating to both the creation of that DDI file as a separate and reformatted version of source materials (either print or electronic) and the original source materials themselves.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
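+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); it reuses the docStatus sample given below and omits the optional citation and other subsections. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <docStatus>This marked-up document includes a provisional data dictionary and brief citation only for the purpose of providing basic access to the data file. A complete codebook will be published at a later date.</docStatus>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>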
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="docSrcType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="titlStmt"/>
+               <xs:element ref="rspStmt" minOccurs="0"/>
+               <xs:element ref="prodStmt" minOccurs="0"/>
+               <xs:element ref="distStmt" minOccurs="0"/>
+               <xs:element ref="serStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="verStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="biblCit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="holdings" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="MARCURI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="docSrc" type="docSrcType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Documentation Source</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Citation for the source document. This element encodes the bibliographic information describing the source codebook, including title information, statement of responsibility, production and distribution information, series and version information, text of a preferred bibliographic citation, and notes (if any). Information for this section should be taken directly from the source document whenever possible. If additional information is obtained and entered in the elements within this section, the source of this information should be noted in the source attribute of the particular element tag. A MARCURI attribute is provided to link to the MARC record for this citation.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="docStatus" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Documentation Status</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Use this field to indicate if the documentation is being presented/distributed before it has been finalized. Some data producers and social science data archives employ data processing strategies that provide for release of data and documentation at various stages of processing. The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docStatus>This marked-up document includes a provisional data dictionary and brief citation only for the purpose of providing basic access to the data file. A complete codebook will be published at a later date.</docStatus>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="drvcmdType" mixed="true">
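+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the child elements are taken from the dataCollector and collectorTraining samples elsewhere in this schema. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataColl>
+                           <dataCollector abbr="SRC" affiliation="University of Michigan">Survey Research Center</dataCollector>
+                           <collectorTraining type="interviewer training">Describe research project, describe population and sample, suggest methods and language for approaching subjects, explain questions and key terms of survey instrument.</collectorTraining>
+                        </dataColl>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>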
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="syntax" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="drvcmd" type="drvcmdType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Derivation Command</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The actual command used to generate the derived variable. The "syntax" attribute is used to indicate the command language employed (e.g., SPSS, SAS, Fortran, etc.). The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <derivation>
+                              <drvcmd syntax="SPSS">RECODE V1 TO V3 (0=1) (1=0) (2=-1) INTO DEFENSE WELFAREHEALTH. </drvcmd>
+                           </derivation>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="drvdesc" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Derivation Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">A textual description of the way in which this variable was derived. The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <derivation>
+                              <drvdesc> VAR215.01 "Outcome of first pregnancy" (1988 NSFG=VAR611 PREGOUT1) If R has never been pregnant (VAR203 PREGNUM EQ 0) then OUTCOM01 is blank/inapplicable. Else, OUTCOM01 is transferred from VAR225 OUTCOME for R's 1st pregnancy. </drvdesc>
+                           </derivation>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="eastBL" type="phraseType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">East Bounding Longitude</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The easternmost coordinate delimiting the geographic extent of the dataset. A valid range of values, expressed in decimal degrees (positive east and positive north), is: -180.0 &lt;= East Bounding Longitude Value &lt;= 180.0</xhtml:div>
+               </xhtml:div>
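+               <!-- Editorial note: illustrative example only (not part of the original DDI documentation); the coordinate value is hypothetical but falls within the valid range. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <eastBL>-66.95</eastBL>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>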
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="embargoType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="event" default="notBefore">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="notBefore"/>
+                     <xs:enumeration value="notAfter"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="format" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="embargo" type="embargoType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Embargo</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>Provides information on variables/nCubes which are not currently available because of policies established by the principal investigators and/or data producers. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the "date" attribute. An "event" attribute is provided to specify "notBefore" or "notAfter" ("notBefore" is the default). A "format" attribute is provided to ensure that this information will be machine-processable, and specifies a format for the embargo element.</xhtml:p>
+                     <xhtml:p>The "format" attribute could be used to specify other conventions for the way that information within the embargo element is set out, if conventions for encoding embargo information were established in the future.</xhtml:p>
+                     <xhtml:p>This element may be repeated to support multiple language expressions of the content.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <embargo event="notBefore" date="2001-09-30"> The data associated with this variable/nCube will not become available until September 30, 2001, because of embargo provisions established by the data producers.</embargo>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="entryType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="colname" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="namest" type="xs:NMTOKEN"/>
+            <xs:attribute name="nameend" type="xs:NMTOKEN"/>
+            <xs:attribute name="morerows" type="xs:string"/>
+            <xs:attribute name="colsep" type="xs:string"/>
+            <xs:attribute name="rowsep" type="xs:string"/>
+            <xs:attribute name="align">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="left"/>
+                     <xs:enumeration value="right"/>
+                     <xs:enumeration value="center"/>
+                     <xs:enumeration value="justify"/>
+                     <xs:enumeration value="char"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="char" type="xs:string"/>
+            <xs:attribute name="charoff" type="xs:NMTOKEN"/>
+            <xs:attribute name="valign">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="top"/>
+                     <xs:enumeration value="middle"/>
+                     <xs:enumeration value="bottom"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <xs:element name="entry" type="entryType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Table Entry</xhtml:h1>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="EstSmpErr" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Estimates of Sampling Error</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Measure of how precisely one can estimate a population value from a given sample.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <EstSmpErr> To assist NES analysts, the PC SUDAAN program was used to compute sampling errors for a wide-ranging example set of proportions estimated from the 1996 NES Pre-election Survey dataset. For each estimate, sampling errors were computed for the total sample and for twenty demographic and political affiliation subclasses of the 1996 NES Pre-election Survey sample. The results of these sampling error computations were then summarized and translated into the general usage sampling error table provided in Table 11. The mean value of deft, the square root of the design effect, was found to be 1.346. The design effect was primarily due to weighting effects (Kish, 1965) and did not vary significantly by subclass size. Therefore the generalized variance table is produced by multiplying the simple random sampling standard error for each proportion and sample size by the average deft for the set of sampling error computations.</EstSmpErr>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="fileCont" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Contents of Files</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Abstract or description of the file. A summary describing the purpose, nature, and scope of the data file, special characteristics of its contents, major subject areas covered, and what questions the PIs attempted to answer when they created the file. A listing of major variables in the file is important here. In the case of multi-file collections, this uniquely describes the contents of each file.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileCont>Part 1 contains both edited and constructed variables describing demographic and family relationships, income, disability, employment, health insurance status, and utilization data for all of 1987.</fileCont>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="fileDscrType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="fileTxt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="locMap" minOccurs="0"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="URI" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS"/>
+            <xs:attribute name="pubrefs" type="xs:IDREFS"/>
+            <xs:attribute name="access" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="fileDscr" type="fileDscrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Files Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>Information about the data file(s) that comprises a collection. This section can be repeated for collections with multiple files. </xhtml:p>
+                     <xhtml:p>The "URI" attribute may be a URN or a URL that can be used to retrieve the file. The "sdatrefs" are summary data description references that record the ID values of all elements within the summary data description section of the Study Description that might apply to the file. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data. The "methrefs" are methodology and processing references that record the ID values of all elements within the study methodology and processing section of the Study Description that might apply to the file. These elements include information on data collection and data appraisal (e.g., sampling, sources, weighting, data cleaning, response rates, and sampling error estimates). The "pubrefs" attribute provides a link to publication/citation references and records the ID values of all citations elements within Other Study Description Materials or Other Study-Related Materials that pertain to this file. "Access" records the ID values of all elements in the Data Access section that describe access conditions for this file.</xhtml:p>
+                     <xhtml:p>Remarks: When a codebook documents two different physical instantiations of a data file, e.g., logical record length (or OSIRIS) and card-image version, the Data File Description should be repeated to describe the two separate files. An ID should be assigned to each file so that in the Variable section the location of each variable on the two files can be distinguished using the unique file IDs.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileDscr ID="CARD-IMAGE" URI="www.icpsr.umich.edu/cgi-bin/archive.prl?path=ICPSR&amp;num=7728"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileDscr ID="LRECL" URI="www.icpsr.umich.edu/cgi-bin/archive.prl?path=ICPSR&amp;num=7728"/>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="fileName" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">File Name</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Contains a short title that will be used to distinguish a particular file/part from other files/parts in the data collection. The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileName ID="File1">Second-Generation Children Data</fileName>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="filePlac" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Place of File Production</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Indicates whether the file was produced at an archive or produced elsewhere.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <filePlac>Washington, DC: United States Department of Commerce, Bureau of the Census</filePlac>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="fileQnty" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Number of Files</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Total number of physical files associated with a collection.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileQnty>5 files</fileQnty>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="fileStrcType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="recGrp" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="type" default="rectangular">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="rectangular"/>
+                     <xs:enumeration value="hierarchical"/>
+                     <xs:enumeration value="relational"/>
+                     <xs:enumeration value="nested"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherType" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="fileStrcRef" type="xs:IDREF" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="fileStrc" type="fileStrcType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">File Structure</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Type of file structure. The attribute "type" is used to indicate hierarchical, rectangular, relational, or nested (the default is rectangular). If the file is rectangular, the next relevant element is File Dimensions. If the "other" value is used for the type attribute, then the otherType attribute should have a value specifying the other type.The otherType attribute should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs. The fileStrcRef attribute allows for multiple data files with different coverage but the same file structure to share a single fileStrc. The file structure is fully described in the first fileTxt within the fileDscr and then the fileStrc in subsequent fileTxt descriptions would reference the first fileStrcRef rather than repeat the details.</xhtml:div>
+               </xhtml:div>
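+               <!-- Illustrative sketch only: hypothetical fileStrc entries showing the "type" attribute and a later file reusing the first structure via fileStrcRef. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileStrc ID="FS1" type="hierarchical"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileStrc fileStrcRef="FS1"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>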
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="fileTxtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="fileName" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="fileCitation" minOccurs="0"/>
+               <xs:element ref="dataFingerprint" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="fileCont" minOccurs="0"/>
+               <xs:element ref="fileStrc" minOccurs="0"/>
+               <xs:element ref="dimensns" minOccurs="0"/>
+               <xs:element ref="fileType" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="format" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="filePlac" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataChck" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="ProcStat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataMsng" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="software" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="verStmt" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="fileTxt" type="fileTxtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">File-by-File Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides descriptive information about the data file. A file name and a full bibliographic citation for the file may be entered, as well as a data fingerprint, if available. Information about the physical properties of the data file is also supported. Make sure to fill out topcClass for the study as these can be used by the data file. Note coverage constraints in fileCont.</xhtml:div>
+               </xhtml:div>
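+               <!-- Illustrative sketch only: a minimal fileTxt combining child elements documented elsewhere in this schema; the content shown is hypothetical. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileTxt>
+                           <fileName ID="File1">Second-Generation Children Data</fileName>
+                           <fileType charset="US-ASCII">ASCII data file</fileType>
+                           <format>comma-delimited</format>
+                        </fileTxt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>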
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="fileCitation" type="citationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">File Citation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The complex element fileCitation provides for a full bibliographic citation option for each data file described in fileDscr. To support accurate citation of a data file, the minimum element set includes: titl, IDNo, authEnty, producer, and prodDate. If a DOI is available for the data file, enter this in the IDNo (this element is repeatable). If a hash value (digital fingerprint) has been created for the data file, enter the information regarding its value and algorithm specification in dataFingerprint.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileCitation>
+                           <titlStmt>
+                              <titl>ABC News/Washington Post Monthly Poll, December 2010</titl>
+                              <IDNo>http://dx.doi.org/10.3886/ICPSR32547.v1</IDNo>
+                           </titlStmt>
+                           <rspStmt>
+                              <AuthEnty>ABC News</AuthEnty>
+                              <AuthEnty>The Washington Post</AuthEnty>
+                           </rspStmt>
+                           <prodStmt>
+                              <producer>ABC News</producer>
+                              <prodDate>2011</prodDate>
+                           </prodStmt>
+                        </fileCitation>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="fileTypeType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="charset" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="fileType" type="fileTypeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Type of File</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Types of data files include raw data (ASCII, EBCDIC, etc.) and software-dependent files such as SAS datasets, SPSS export files, etc. If the data are of mixed types (e.g., ASCII and packed decimal), state that here. Note that the element varFormat permits specification of the data format at the variable level. The "charset" attribute allows one to specify the character set used in the file, e.g., US-ASCII, EBCDIC, UNICODE UTF-8, etc. The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileType charset="US-ASCII">ASCII data file</fileType>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="format" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Format</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Physical format of the data file: Logical record length format, card-image format (i.e., data with multiple records per case), delimited format, free format, etc. The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <format>comma-delimited</format>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="forwardType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="qstn" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="forward" type="forwardType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Forward Progression</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Contains a reference to IDs of possible following questions. The "qstn" IDREFS may be used to specify the question IDs.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <forward qstn="Q120 Q121 Q122 Q123 Q124">If yes, please ask questions 120-124.</forward>
+                           </qstn>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="frequencType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="freq" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="frequenc" type="frequencType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Frequency of Data Collection</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">For data collected at more than one point in time, the frequency with which the data were collected. The "freq" attribute is included to permit the development of a controlled vocabulary for this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <frequenc>monthly</frequenc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <frequenc>quarterly</frequenc>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="fundAgType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+            <xs:attribute name="role" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="fundAg" type="fundAgType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Funding Agency/Sponsor</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The source(s) of funds for production of the work. If different funding agencies sponsored different stages of the production process, use the "role" attribute to distinguish them.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fundAg abbr="NSF" role="infrastructure">National Science Foundation</fundAg>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fundAg abbr="SUN" role="equipment">Sun Microsystems</fundAg>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="geoBndBoxType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="westBL"/>
+               <xs:element ref="eastBL"/>
+               <xs:element ref="southBL"/>
+               <xs:element ref="northBL"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="geoBndBox" type="geoBndBoxType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Geographic Bounding Box</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The fundamental geometric description for any dataset that models geography. GeoBndBox is the minimum box, defined by west and east longitudes and north and south latitudes, that includes the largest geographic extent of the dataset's geographic coverage. This element is used in the first pass of a coordinate-based search. If the boundPoly element is included, then the geoBndBox element MUST be included.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geogCover>Nevada State</geogCover>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geoBndBox>
+                           <westBL>-120.005729004</westBL>
+                           <eastBL>-114.039663</eastBL>
+                           <southBL>35.00208499998</southBL>
+                           <northBL>42.002207</northBL>
+                        </geoBndBox>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geogCover>Norway</geogCover>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geoBndBox>
+                           <westBL>4.789583</westBL>
+                           <eastBL>33.637497</eastBL>
+                           <southBL>57.987915</southBL>
+                           <northBL>80.76416</northBL>
+                        </geoBndBox>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="geoMapType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="URI" type="xs:string"/>
+            <xs:attribute name="mapformat" type="xs:string"/>
+            <xs:attribute name="levelno" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="geoMap" type="geoMapType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Geographic Map</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element is used to point, using a "URI" attribute, to an external map that displays the geography in question. The "levelno" attribute indicates the level of the geographic hierarchy relayed in the map. The "mapformat" attribute indicates the format of the map.</xhtml:div>
+               </xhtml:div>
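+               <!-- Illustrative sketch only: the URI, mapformat, and levelno values below are hypothetical. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geoMap URI="http://www.example.org/maps/nevada-counties.gif" mapformat="image/gif" levelno="2"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>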
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="geogCover" type="conceptualTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Geographic Coverage</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information on the geographic coverage of the data. Includes the total geographic scope of the data, and any additional levels of geographic coding provided in the variables. Maps to Dublin Core Coverage element. Inclusion of this element in the codebook is recommended. Fpor forward-compatibility, DDI 3 XHTML tags may be used in this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geogCover>State of California</geogCover>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="geogUnit" type="conceptualTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Geographic Unit</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Lowest level of geographic aggregation covered by the data.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <geogUnit>state</geogUnit>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="grantNoType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="agency" type="xs:string"/>
+            <xs:attribute name="role" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="grantNo" type="grantNoType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Grant Number</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The grant/contract number of the project that sponsored the effort. If more than one, indicate the appropriate agency using the "agency" attribute. If different funding agencies sponsored different stages of the production process, use the "role" attribute to distinguish the grant numbers. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <grantNo agency="Bureau of Justice Statistics">J-LEAA-018-77</grantNo>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="gringLat" type="phraseType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">G-Ring Latitude</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Latitude (y coordinate) of a point. Valid range   expressed in decimal degrees is as follows: -90,0 to 90,0 degrees (latitude)</xhtml:div>
+               </xhtml:div>
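+               <!-- Illustrative sketch only: a sample latitude in decimal degrees, reusing the northBL coordinate from the geoBndBox example above. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <gringLat>42.002207</gringLat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>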
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="gringLon" type="phraseType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">G-Ring Longitude</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Longitude (x coordinate) of a point. Valid range expressed in decimal degrees is as follows: -180,0 to 180,0 degrees (longitude)</xhtml:div>
+               </xhtml:div>
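+               <!-- Illustrative sketch only: a sample longitude in decimal degrees, reusing the westBL coordinate from the geoBndBox example above. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <gringLon>-120.005729004</gringLon>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>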
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="guide" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Guide to Codebook</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">List of terms and definitions used in the documentation. Provided to assist users in using the document correctly. This element was intended to reflect the section in OSIRIS codebooks that assisted users in reading and interpreting a codebook. Each OSIRIS codebook contained a sample codebook page that defined the codebook conventions.  The element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
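+               <!-- Illustrative sketch only: the text below is a hypothetical guide entry, not taken from an actual codebook. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <guide>Variable descriptions list the question text, unweighted frequencies, and missing-data codes; see the sample codebook page for the conventions used throughout this document.</guide>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>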
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="holdingsType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="location" type="xs:string"/>
+            <xs:attribute name="callno" type="xs:string"/>
+            <xs:attribute name="URI" type="xs:string"/>
+            <xs:attribute name="media" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="holdings" type="holdingsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Holdings Information</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information concerning either the physical or electronic holdings of the cited work. Attributes include: location--The physical location where a copy is held; callno--The call number for a work at the location specified; and URI--A URN or URL for accessing the electronic copy of the cited work. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <holdings location="ICPSR DDI Repository" callno="inap." URI="http://www.icpsr.umich.edu/DDIrepository/">Marked-up Codebook for Current Population Survey, 1999: Annual Demographic File</holdings>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <holdings location="University of Michigan Graduate Library" callno="inap." URI="http://www.umich.edu/library/">Codebook for Current Population Survey, 1999: Annual Demographic File </holdings>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="IDNoType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="agency" type="xs:string"/>
+            <xs:attribute name="level">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="study"/>
+                     <xs:enumeration value="file"/>
+                     <xs:enumeration value="project"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="IDNo" type="IDNoType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Identification Number</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Unique string or number (producer's or archive's number). An "agency" attribute is supplied. Identification Number of data collection maps to Dublin Core Identifier element. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <IDNo agency="ICPSR">6678</IDNo>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <IDNo agency="ZA">2010</IDNo>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="imputation" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Imputation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">According to the Statistical Terminology glossary maintained by the National Science Foundation, this is "the process by which one estimates missing values for items that a survey respondent failed to provide," and if applicable in this context, it refers to the type of procedure used. When applied to an nCube, imputation takes into consideration all of the dimensions that are part of that nCube. This element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <imputation>This variable contains values that were derived by substitution.</imputation>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="invalrngType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:choice minOccurs="1" maxOccurs="unbounded">
+                  <xs:element ref="item"/>
+                  <xs:element ref="range"/>
+               </xs:choice>
+               <xs:element ref="key" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="invalrng" type="invalrngType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Range of Invalid Data Values</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Values for a particular variable that represent   missing data, not applicable responses, etc.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <invalrng>
+                           <range UNITS="INT" min="98" max="99"/> 
+                           <key> 
+                     			98 DK 
+                     			99 Inappropriate
+                     		</key> 
+                        </invalrng>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="itemType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="UNITS" default="INT">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="INT"/>
+                     <xs:enumeration value="REAL"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="VALUE" type="xs:string" use="required"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="item" type="itemType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Value Item</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The counterpart to Range; used to encode individual values. This is an empty element consisting only of its attributes. The "UNITS" attribute permits the specification of integer/real numbers. The "VALUE" attribute specifies the actual value.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <valrng>
+                           <item VALUE="1"/>
+                           <item VALUE="2"/>
+                           <item VALUE="3"/> 
+                        </valrng>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="ivuInstr" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Interviewer Instructions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Specific instructions to the individual conducting an interview.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <ivuInstr>Please prompt the respondent if they are reticent to answer this question.</ivuInstr>
+                           </qstn>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="key" type="tableAndTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Range Key</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element permits a listing of the category values and labels. While this information is coded separately in the Category element, there may be some value in having this information in proximity to the range of valid and invalid values. A table is permissible in this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <valrng>
+                           <range UNITS="INT" maxExclusive="95" min="05" max="80"> </range> 
+                           <key>
+                        		05 (PSU) Parti Socialiste Unifie et extreme gauche (Lutte Ouvriere) [United Socialists and extreme left (Workers Struggle)] 
+                        		50 Les Verts [Green Party] 
+                        		80 (FN) Front National et extreme droite [National Front and extreme right] 
+                     		</key>
+                        </valrng>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="keywordType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="vocab" type="xs:string"/>
+            <xs:attribute name="vocabURI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="keyword" type="keywordType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Keywords</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Words or phrases that describe salient aspects of a data collection's content. Can be used for building keyword indexes and for classification and retrieval purposes. A controlled vocabulary can be employed. Maps to Dublin Core Subject element. The "vocab" attribute is provided for specification of the controlled vocabulary in use, e.g., LCSH, MeSH, etc. The "vocabURI" attribute specifies the location for the full controlled vocabulary. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <keyword vocab="ICPSR Subject Thesaurus" vocabURI="http://www.icpsr.umich.edu/thesaurus/subject.html">quality of life</keyword>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <keyword vocab="ICPSR Subject Thesaurus" vocabURI="http://www.icpsr.umich.edu/thesaurus/subject.html">family</keyword>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <keyword vocab="ICPSR Subject Thesaurus" vocabURI="http://www.icpsr.umich.edu/thesaurus/subject.html">career goals</keyword>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="lablType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="level" type="xs:string"/>
+            <xs:attribute name="vendor" type="xs:string"/>
+            <xs:attribute name="country" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="labl" type="lablType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Label</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">A short description of the parent element. In the variable label, the length of this phrase may depend on the statistical analysis system used (e.g., some versions of SAS permit 40-character labels, while some versions of SPSS permit 120 characters), although the DDI itself imposes no restrictions on the number of characters allowed. A "level" attribute is included to permit coding of the level to which the label applies, i.e. record group, variable group, variable, category group, category, nCube group, nCube, or other study-related materials. The "vendor" attribute was provided to allow for specification of different labels for use with different vendors' software. The attribute "country" allows for the denotation of country-specific labels. The "sdatrefs" attribute records the ID values of all elements within the Summary Data Description section of the Study Description that might apply to the label. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data.</xhtml:div>
+               </xhtml:div>
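+               <!-- Illustrative sketch only: hypothetical labels showing the "level" attribute at the variable and category levels. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <labl level="variable">Age of respondent</labl>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <labl level="category">Strongly agree</labl>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>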
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="locMapType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="dataItem" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="locMap" type="locMapType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Location Map</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element maps individual data entries to one or more physical storage locations. It is used to describe the physical location of aggregate/tabular data in cases where the nCube model is employed.</xhtml:div>
+               </xhtml:div>
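+               <!-- Illustrative sketch only: a locMap whose ID is referenced by the nCube location example below; its dataItem children are omitted here. -->
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <locMap ID="LM">
+                           <!-- dataItem entries describing the physical storage of each cell would appear here -->
+                        </locMap>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>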
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="locationType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="StartPos" type="xs:string"/>
+            <xs:attribute name="EndPos" type="xs:string"/>
+            <xs:attribute name="width" type="xs:string"/>
+            <xs:attribute name="RecSegNo" type="xs:string"/>
+            <xs:attribute name="fileid" type="xs:IDREF"/>
+            <xs:attribute name="locMap" type="xs:IDREF"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="location" type="locationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Location</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This is an empty element containing only the attributes listed below. Attributes include "StartPos" (starting position of variable), "EndPos" (ending position of variable), "width" (number of columns the variable occupies), "RecSegNo" (the record segment number, deck or card number the variable is located on), and "fileid", an IDREF link to the fileDscr element for the file that this location is within (this is necessary in cases where the same variable may be coded in two different files, e.g., a logical record length type file and a card image type file). Note that if there is no width or ending position, then the starting position should be the ordinal position in the file, and the file would be described as free-format. The attribute "locMap" is an IDREF to the element locMap and serves as a pointer to indicate that the location information for the nCube's cells (aggregate data) is located in that section.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <location StartPos="55" EndPos="57" width="3" RecSegNo="2" fileid="CARD-IMAGE"/>
+                           <location StartPos="167" EndPos="169" fileid="LRECL"/>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <location locMap="LM"/>
+                        </nCube>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="logRecL" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Logical Record Length</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Logical record length, i.e., number of characters of data in the record.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example"> 
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <logRecL>27</logRecL>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="measureType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="varRef" type="xs:IDREF"/>
+            <xs:attribute name="aggrMeth">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="sum"/>
+                     <xs:enumeration value="average"/>
+                     <xs:enumeration value="count"/>
+                     <xs:enumeration value="mode"/>
+                     <xs:enumeration value="median"/>
+                     <xs:enumeration value="maximum"/>
+                     <xs:enumeration value="minimum"/>
+                     <xs:enumeration value="percent"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherAggrMeth" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="measUnit" type="xs:string"/>
+            <xs:attribute name="scale" type="xs:string"/>
+            <xs:attribute name="origin" type="xs:string"/>
+            <xs:attribute name="additivity">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="stock"/>
+                     <xs:enumeration value="flow"/>
+                     <xs:enumeration value="non-additive"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="measure" type="measureType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Measure</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The element measure indicates the measurement features of the cell content: type of aggregation used, measurement unit, and measurement scale. An origin point is recorded for anchored scales, to be used in determining relative movement along the scale. Additivity indicates whether an aggregate is a stock (like the population at a given point in time) or a flow (like the number of births or deaths over a certain period of time). The non-additive flag is to be used for measures that for logical reasons cannot be aggregated to a higher level - for instance, data that only make sense at a certain level of aggregation, like a classification. Two nCubes may be identical except for their measure - for example, a count of persons by age and percent of persons by age. Measure is an empty element that includes the following attributes: "varRef" is an IDREF; "aggrMeth" indicates the type of aggregation method used, for example 'sum', 'average', 'count'; "measUnit" records the measurement unit, for example 'km', 'miles', etc.; "scale" records unit of scale, for example 'x1', 'x1000'; "origin" records the point of origin for anchored scales;"additivity" records type of additivity such as 'stock', 'flow', 'non-additive'. If a value of "other" is used for the aggrMeth attribute, a term from a controlled vocabulary should be placed in the "otherAggrMeth" attribute, and a the complex element controlledVocabUsed should be used to specify the controlled vocabulary.</xhtml:div>
+               </xhtml:div>
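+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample only; the varRef ID and attribute values shown are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <measure varRef="V102" aggrMeth="count" measUnit="persons" scale="x1" additivity="stock"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>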
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="methodType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="dataColl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="anlyInfo" minOccurs="0"/>
+               <xs:element ref="stdyClas" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataProcessing" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="codingInstructions" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="method" type="methodType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Methodology and Processing</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This section describes the methodology and processing involved in a data collection.</xhtml:div>
+               </xhtml:div>
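+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>A minimal illustrative sample; the child content shown is hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <method>
+                           <notes>Undocumented codes were found in this data collection.</notes>
+                           <dataProcessing type="topcoding">Income variables were topcoded to protect confidentiality.</dataProcessing>
+                        </method>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>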
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="codingInstructionsType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="command" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+            <xs:attribute name="relatedProcesses" type="xs:IDREFS" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="codingInstructions" type="codingInstructionsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Coding Instructions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Describe specific coding instructions used in data processing, cleaning, assession, or tabulation. Element relatedProcesses allows linking a coding instruction to one or more processes such as dataProcessing, dataAppr, cleanOps, etc. Use the txt element to describe instructions in a human readable form.</xhtml:div>
+               </xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<codingInstructions relatedProcesses="cleanOps_7334" type="recode">
+<txt>recode undocumented/wild codes to missing, i.e., 0.</txt>
+<command formalLanguage="SPSS">RECODE V1 TO V100 (10 THROUGH HIGH = 0)</command>
+</codingInstructions>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="commandType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="formalLanguage" type="xs:string"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+
+   <xs:element name="command" type="commandType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Command</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provide command code for the coding instruction. The formalLanguage attribute identifies the language of the command code.</xhtml:div>
+               </xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<command formalLanguage="SPSS">RECODE V1 TO V100 (10 THROUGH HIGH = 0)</command>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="dataProcessingType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="dataProcessing" type="dataProcessingType">
+      <xs:annotation>
+				<xs:documentation>
+					<xhtml:div>
+						<xhtml:h1 class="element_title">Data Processing</xhtml:h1>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Description</xhtml:h2>
+							<xhtml:div class="description">Describes various data processing procedures not captured elsewhere in the documentation, such as topcoding, recoding, suppression, tabulation, etc. The "type" attribute supports better classification of this activity, including the optional use of a controlled vocabulary.</xhtml:div>
+						</xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<dataProcessing type="topcoding">The income variables in this study (RESP_INC, HHD_INC, and SS_INC) were topcoded to protect confidentiality.</dataProcessing>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+					</xhtml:div>
+				</xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="miType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="phraseType">
+            <xs:attribute name="varRef" type="xs:IDREF" use="required"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="mi" type="miType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Mathematical Identifier</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Token element containing the smallest unit in the mrow that carries meaning.</xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="mrowType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="mi" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="mrow" type="mrowType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Mathematical Row</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element is a wrapper containing the presentation expression mi. It creates a single string without spaces consisting of the individual elements described within it. It can be used to create a single variable by concatenating other variables into a single string. It is used to create linking variables composed of multiple non-contiguous parts, or to define unique strings for various category values of a single variable.</xhtml:div>
+               </xhtml:div>
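+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample showing two variables concatenated into a single linking string; the varRef IDs and content are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <mrow>
+                           <mi varRef="V1">STATE</mi>
+                           <mi varRef="V2">COUNTY</mi>
+                        </mrow>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>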
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="nCubeType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="location" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="imputation" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="security" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="embargo" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="respUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="anlysUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="verStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="purpose" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dmns" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="measure" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="name" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS"/>
+            <xs:attribute name="pubrefs" type="xs:IDREFS"/>
+            <xs:attribute name="access" type="xs:IDREFS"/>
+            <xs:attribute name="dmnsQnty" type="xs:string"/>
+            <xs:attribute name="cellQnty" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="nCube" type="nCubeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">nCube</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>Describes the logical structure of an n-dimensional array, in which each coordinate intersects with every other dimension at a single point. The nCube has been designed for use in the markup of aggregate data. Repetition of the following elements is provided to support multi-language content: anlysUnit, embargo, imputation, purpose, respUnit, and security. This element includes the following attributes:</xhtml:p>
+                     <xhtml:p>The attribute "name" includes a short label for the nCube. Following the rules of many statistical analysis systems such as SAS and SPSS, names are usually up to eight characters long.</xhtml:p>
+                     <xhtml:p>The "sdatrefs" are summary data description references which record the ID values of all elements within the summary data description section of the Study Description which might apply to the nCube. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data.</xhtml:p>
+                     <xhtml:p>The "methrefs" are methodology and processing references which record the ID values of all elements within the study methodology and processing section of the Study Description which might apply to the nCube. These elements include information on data collection and data appraisal (e.g., sampling, sources, weighting, data cleaning, response rates, and sampling error estimates).</xhtml:p>
+                     <xhtml:p>The "pubrefs" attribute provides a link to publication/citation references and records the ID values of all citations elements in Other Study Description Materials or Other Study-Related Materials that pertain to this nCube.</xhtml:p>
+                     <xhtml:p>The "access" attribute records the ID values of all elements in the Data Access section that describe access conditions for this nCube. The "dmnsQnty" attribute notes the number of dimensions in the nCube. The "cellQnty" attribute indicates the total number of cells in the nCube.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
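+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample only; the name, dimension and cell counts, and IDREF values are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube name="AGESEX" dmnsQnty="2" cellQnty="210">
+                           <location locMap="LM"/>
+                           <labl>Age by Sex</labl>
+                           <measure aggrMeth="count"/>
+                        </nCube>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>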
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="nCubeGrpType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="concept" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="defntn" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="type" default="other">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="section"/>
+                     <xs:enumeration value="multipleResp"/>
+                     <xs:enumeration value="grid"/>
+                     <xs:enumeration value="display"/>
+                     <xs:enumeration value="repetition"/>
+                     <xs:enumeration value="subject"/>
+                     <xs:enumeration value="version"/>
+                     <xs:enumeration value="iteration"/>
+                     <xs:enumeration value="analysis"/>
+                     <xs:enumeration value="pragmatic"/>
+                     <xs:enumeration value="record"/>
+                     <xs:enumeration value="file"/>
+                     <xs:enumeration value="randomized"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherType" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="nCube" type="xs:IDREFS"/>
+            <xs:attribute name="nCubeGrp" type="xs:IDREFS"/>
+            <xs:attribute name="name" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS"/>
+            <xs:attribute name="pubrefs" type="xs:IDREFS"/>
+            <xs:attribute name="access" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="nCubeGrp" type="nCubeGrpType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">nCube Group</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>A group of nCubes that may share a common subject, arise from the interpretation of a single question, or be linked by some other factor. This element makes it possible to identify all nCubes derived from a single presentation table, and to provide the original table title and universe, as well as to reference the source. Specific nesting patterns can be described using the attribute nCubeGrp.</xhtml:p>
+                     <xhtml:p>nCube groups are also created this way in order to permit nCubes to belong to multiple groups, including multiple subject groups, without causing overlapping groups. nCubes that are linked by use of the same variable need not be identified by an nCubeGrp element because they are already linked by a common variable element. Note that as a result of the strict sequencing required by XML, all nCube Groups must be marked up before the Variable element is opened. That is, the mark-up author cannot mark up an nCube Group, then mark up its constituent nCubes, and then mark up another nCube Group.</xhtml:p>
+                     <xhtml:p>The "type" attribute refers to the general type of grouping of the nCubes. Specific nCube Groups, included within the 'type' attribute, are: </xhtml:p>
+                     <xhtml:p>Display: nCubes that are part of the same presentation table.</xhtml:p>
+                     <xhtml:p>Subject: nCubes that address a common topic or subject, e.g., income, poverty, children. </xhtml:p>
+                     <xhtml:p>Iteration: nCubes that appear in different sections of the data file measuring a common subject in different ways, e.g., using different universes, units of measurement, etc. </xhtml:p>
+                     <xhtml:p>Pragmatic: An nCube group without shared properties.</xhtml:p>
+                     <xhtml:p>Record: nCubes from a single record in a hierarchical file.</xhtml:p>
+                     <xhtml:p>File: nCubes from a single file in a multifile study.</xhtml:p>
+                     <xhtml:p>Other: nCubes that do not fit easily into any of the categories listed above, e.g., a group of nCubes whose documentation is in another language. A term from a controlled vocabulary may be placed into the otherType attribute if this value is used.</xhtml:p>
+                     <xhtml:p>The otherType attribute should only be used when applying a controlled vocabulary, and when the type attribute has been given a value of "other". Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                     <xhtml:p>The "nCube" attribute is used to reference all the IDs of the nCubes belonging to the group.</xhtml:p>
+                     <xhtml:p>The "nCubeGrp" attribute is used to reference all the subsidiary nCube groups which nest underneath the current nCubeGrp. This allows for encoding of a hierarchical structure of nCube groups.</xhtml:p>
+                     <xhtml:p>The attribute "name" provides a name, or short label, for the group.</xhtml:p>
+                     <xhtml:p>The "sdatrefs" are summary data description references that record the ID values of all elements within the summary data description section of the Study Description that might apply to the group. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data.</xhtml:p>
+                     <xhtml:p>The "methrefs" are methodology and processing references which record the ID values of all elements within the study methodology and processing section of the Study Description which might apply to the group. These elements include information on data collection and data appraisal (e.g., sampling, sources, weighting, data cleaning, response rates, and sampling error estimates).</xhtml:p>
+                     <xhtml:p>The "pubrefs" attribute provides a link to publication/citation references and records the ID values of all citations elements within Section codeBook/stdyDscr/othrStdyMat or codeBook/otherMat that pertain to this nCube group.</xhtml:p>
+                     <xhtml:p>The "access" attribute records the ID values of all elements in codeBook/stdyDscr/dataAccs of the document that describe access conditions for this nCube group.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
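+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample of a subject-based group; the nCube IDREF values and text are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp type="subject" name="income" nCube="NC1 NC2 NC3">
+                           <labl>Income tables</labl>
+                           <txt>nCubes describing household and personal income.</txt>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>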
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="nationType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="conceptualTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="nation" type="nationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Country</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Indicates the country or countries covered in the file. Attribute "abbr" may be used to list common abbreviations; use of ISO country codes is recommended. Maps to Dublin Core Coverage element. Inclusion of this element is recommended. For forward-compatibility, DDI 3 XHTML tags may be used in this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nation abbr="GB">United Kingdom</nation>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="northBL" type="phraseType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">North Bounding Latitude</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The northernmost coordinate delimiting the geographic extent of the dataset. A valid range of values, expressed in decimal degrees (positive east and positive north), is: -90,0 &lt;= North Bounding Latitude Value &lt;= 90,0 ; North Bounding Latitude Value = South Bounding Latitude Value</xhtml:div>
+               </xhtml:div>
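+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample; the coordinate value is hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <northBL>71.39</northBL>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>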
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="notesType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="tableAndTextType">
+            <xs:attribute name="type" type="xs:string"/>
+            <xs:attribute name="subject" type="xs:string"/>
+            <xs:attribute name="level" type="xs:string"/>
+            <xs:attribute name="resp" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="parent" type="xs:IDREFS" use="optional"/>
+            <xs:attribute name="sameNote" type="xs:IDREF" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="notes" type="notesType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Notes and comments</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>For clarifying information/annotation regarding the parent element.</xhtml:p>
+                     <xhtml:p>The attributes for notes permit a controlled vocabulary to be developed ("type" and "subject"), indicate the "level" of the DDI to which the note applies (study, file, variable, etc.), and identify the author of the note ("resp").</xhtml:p>
+                     <xhtml:p>The parent attribute is used to support capturing information obtained while preparing files for translation to DDI 3. It provides the ID(s) of the element this note is related to.</xhtml:p>
+                     <xhtml:p>The sameNote attribute is used to support capturing information obtained while preparing files for translation to DDI 3. If the same note is used multiple times, all the parent IDs can be captured in a single note, and all duplicate notes can reference that note through the sameNote attribute.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <verStmt>
+                              <notes resp="Jane Smith">Additional information on derived variables  has been added to this marked-up version of the documentation.</notes>
+                           </verStmt>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <citation>
+                              <notes resp="Jane Smith">This citation was prepared by the archive based on information received from the markup authors.</notes>
+                           </citation>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docSrc>
+                           <verStmt>
+                              <notes resp="Jane Smith">The source codebook was produced from original hardcopy materials using  Optical Character Recognition (OCR).</notes>
+                           </verStmt>
+                        </docSrc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docSrc>
+                           <notes>A machine-readable version of the source codebook was supplied by the Zentralarchiv</notes>
+                        </docSrc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <notes>This Document Description, or header information, can be used  within an electronic resource discovery environment.</notes>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyDscr>
+                           <verStmt>
+                              <notes resp="Jane Smith">Data for 1998 have been added to this version of the data collection.</notes>
+                           </verStmt>
+                        </stdyDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyDscr>
+                           <citation>
+                              <notes resp="Jane Smith">This citation was sent to ICPSR by the  agency depositing the data.</notes>
+                           </citation>
+                        </stdyDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyInfo>
+                           <notes>Data on employment and income refer to the preceding year, although demographic data refer to the time of the survey.</notes>
+                        </stdyInfo>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <method>
+                           <notes>Undocumented codes were found in this data collection. Missing data are represented by blanks.</notes>
+                        </method>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <method>
+                           <notes>For this collection, which focuses on employment, unemployment, and gender equality, data from EUROBAROMETER 44.3: HEALTH CARE ISSUES AND PUBLIC SECURITY, FEBRUARY-APRIL 1996 (ICPSR 6752) were merged with an oversample.</notes>
+                        </method>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <setAvail>
+                           <notes> Data from the Bureau of Labor Statistics used in the analyses for the final report are not provided as part of this collection.</notes>
+                        </setAvail>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataAccs>
+                           <notes>Users should note that this is a beta version of the data. The investigators therefore request that users who encounter any problems with the dataset contact them at the above address.</notes>
+                        </dataAccs>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileStrc>
+                           <notes>The number of arrest records for an individual is dependent on the number of arrests an offender had.</notes>
+                        </fileStrc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileTxt>
+                           <verStmt>
+                              <notes>Data for all previously-embargoed variables are now available in  this version of the file.</notes>
+                           </verStmt>
+                        </fileTxt>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileDscr>
+                           <notes>There is a restricted version of this file containing confidential information,  access to which is controlled by the principal investigator.</notes>
+                        </fileDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp>
+                           <notes>This variable group was created for the purpose of combining all derived variables.</notes>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp>
+                           <notes source="archive" resp="John Data">This variable group and all other variable groups in this data file were organized according to a schema developed by the adhoc advisory committee. </notes>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp>
+                           <notes>This nCube Group was created for the purpose of presenting a cross-tabulation between variables "Tenure" and "Age of householder."</notes>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <valrng>
+                           <notes subject="political party">Starting with Euro-Barometer 2 the coding of this variable has been standardized following an approximate ordering of each country's political parties along a "left" to "right" continuum in the first digit of the codes. Parties coded 01-39 are generally considered on the "left", those coded 40-49 in the "center", and those coded 60-89 on the "right" of the political spectrum. Parties coded 50-59 cannot be readily located in the traditional meaning of "left" and "right". The second digit of the codes is not significant to the "left-right" ordering. Codes 90-99 contain the response "other party" and various missing data responses. Users may modify these codings or part of these codings in order to suit their specific needs. </notes>
+                        </valrng>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <invalrng>
+                           <notes>Codes 90-99 contain the response "other party" and various missing data responses. </notes>
+                        </invalrng>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <verStmt>
+                              <notes>The labels for categories 01 and 02 for this variable, were inadvertently switched in the first version of this variable and have now been corrected.</notes>
+                           </verStmt>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <notes>This variable was created by recoding location of residence to Census regions.</notes>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <verStmt>
+                              <notes>The labels for categories 01 and 02 in dimension 1 were inadvertently switched in the first version of the cube, and have now been corrected.</notes>
+                           </verStmt>
+                        </nCube>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <notes>This nCube was created to meet the needs of local low income programs in determining eligibility for federal funds.</notes>
+                        </nCube>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dataDscr>
+                           <notes>The variables in this study are identical to earlier waves. </notes>
+                        </dataDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <otherMat>
+                           <notes>Users should be aware that this questionnaire was modified  during the CAI process.</notes>
+                        </otherMat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="origArch" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Archive Where Study Originally Stored</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Archive from which the data collection was obtained; the originating archive. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <origArch>Zentralarchiv fuer empirische Sozialforschung</origArch>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="othIdType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" type="xs:string"/>
+            <xs:attribute name="role" type="xs:string"/>
+            <xs:attribute name="affiliation" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="othId" type="othIdType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Other Identifications /Acknowledgments</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Statements of responsibility not recorded in the title and statement of responsibility areas. Indicate here the persons or bodies connected with the work, or significant persons or bodies connected with previous editions and not already named in the description. For example, the name of the person who edited the marked-up documentation might be cited in codeBook/docDscr/rspStmt/othId, using the "role" and "affiliation" attributes. Other identifications/acknowledgments for data collection (codeBook/stdyDscr/citation/rspStmt/othId) maps to Dublin Core Contributor element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <othId role="editor" affiliation="INRA">Jane Smith</othId>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="othRefsType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>
+                  <xs:group ref="xhtml:BlkNoForm.mix"/>
+                  <xs:element ref="citation"/>
+               </xs:choice>
+            </xs:sequence>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="othRefs" type="othRefsType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Other References Notes</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Indicates other pertinent references. Can take the form of bibliographic citations. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <othRefs>Part II of the documentation, the Field Representative's Manual, is provided in hardcopy form only.</othRefs>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="otherMatType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:choice>
+               <xs:sequence>
+                  <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="table" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="citation" minOccurs="0"/>
+                  <xs:element ref="otherMat" minOccurs="0" maxOccurs="unbounded"/>
+               </xs:sequence>
+            </xs:choice>
+            <xs:attribute name="type" type="xs:string"/>
+            <xs:attribute name="level" type="xs:NMTOKEN" use="required"/>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="otherMat" type="otherMatType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Other Study-Related Materials</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>This section allows for the inclusion of other materials that are related to the study as identified and labeled by the DTD/Schema users (encoders). The materials may be entered as PCDATA (ASCII text) directly into the document (through use of the "txt" element). This section may also serve as a "container" for other electronic materials such as setup files by providing a brief description of the study-related materials accompanied by the attributes "type" and "level" defining the material further. The "URI" attribute may be used to indicate the location of the other study-related materials.</xhtml:p>
+                     <xhtml:p>Other Study-Related Materials may include: questionnaires, coding notes, SPSS/SAS/Stata setup files (and others), user manuals, continuity guides, sample computer software programs, glossaries of terms, interviewer/project instructions, maps, database schema, data dictionaries, show cards, coding information, interview schedules, missing values information, frequency files, variable maps, etc.</xhtml:p>
+                     <xhtml:p>The "level" attribute is used to clarify the relationship of the other materials to components of the study. Suggested values for level include specifications of the item level to which the element applies: e.g., level= data; level=datafile; level=studydsc; level=study. The URI attribute need not be used in every case; it is intended for capturing references to other materials separate from the codebook itself. In Section 5, Other Material is recursively defined.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
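+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample only; the URI and descriptive text are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <otherMat type="questionnaire" level="study" URI="http://www.example.org/study/questionnaire.pdf">
+                           <labl>Survey Questionnaire</labl>
+                           <txt>Facsimile of the original survey instrument.</txt>
+                        </otherMat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>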
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="othrStdyMatType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="relMat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="relStdy" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="relPubl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="othRefs" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="othrStdyMat" type="othrStdyMatType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Other Study Description Materials</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Other materials relating to the study description. This section describes other materials that are related to the study description that are primarily descriptions of the content and use of the study, such as appendices, sampling information, weighting details, methodological and technical details, publications based upon the study content, related studies or collections of studies, etc. This section may point to other materials related to the description of the study through use of the generic citation element, which is available for each element in this section. This maps to Dublin Core Relation element. Note that codeBook/otherMat (Other Study-Related Materials), should be used for materials used in the production of the study or useful in the analysis of the study. The materials in codeBook/otherMat may be entered as PCDATA (ASCII text) directly into the document (through use of the txt element). That section may also serve as a "container" for other electronic materials by providing a brief description of the study-related materials accompanied by the "type" and "level" attributes further defining the materials. Other Study-Related Materials in codeBook/otherMat may include: questionnaires, coding notes, SPSS/SAS/Stata setup files (and others), user manuals, continuity guides, sample computer software programs, glossaries of terms, interviewer/project instructions, maps, database schema, data dictionaries, show cards, coding information, interview schedules, missing values information, frequency files, variable maps, etc.</xhtml:div>
+               </xhtml:div>
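+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample; the descriptive text is hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <othrStdyMat>
+                           <relMat>A user guide describing the sampling and weighting procedures accompanies the data files.</relMat>
+                           <othRefs>Additional technical documentation is available from the archive upon request.</othRefs>
+                        </othrStdyMat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>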
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="parTitl" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Parallel Title</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Title translated into another language.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <titl>Politbarometer West [Germany], Partial Accumulation, 1977-1995</titl>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <parTitl>Politbarometer, 1977-1995: Partielle Kumulation</parTitl>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="physLocType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="type" type="xs:string"/>
+            <xs:attribute name="recRef" type="xs:IDREF"/>
+            <xs:attribute name="startPos" type="xs:string"/>
+            <xs:attribute name="width" type="xs:string"/>
+            <xs:attribute name="endPos" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="physLoc" type="physLocType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Physical Location</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>This is an empty element containing only the attributes listed below. Attributes include "type" (type of file structure: rectangular, hierarchical, two-dimensional, relational), "recRef" (IDREF link to the appropriate file or recGrp element within a file), "startPos" (starting position of variable or data item), "endPos" (ending position of variable or data item), "width" (number of columns the variable/data item occupies), "RecSegNo" (the record segment number, deck or card number the variable or data item is located on), and "fileid" (an IDREF link to the fileDscr element for the file that includes this physical location).</xhtml:p>
+                     <xhtml:p>Remarks: Where the same variable is coded in two different files, e.g., a fixed format file and a relational database file, simply repeat the physLoc element with the alternative location information. Note that if there is no width or ending position, then the starting position should be the ordinal position in the file, and the file would be described as free-format. New attributes will be added as other storage formats are described within the DDI.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <physLoc type="rectangular" recRef="R1" startPos="55" endPos="57" width="3"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <physLoc type="hierarchical" recRef="R6" startPos="25" endPos="25" width="1"/>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="pointType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="gringLat"/>
+               <xs:element ref="gringLon"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="point" type="pointType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Point</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">0-dimensional geometric primitive, representing a position, but not having extent. In this declaration, point is limited to a longitude/latitude coordinate system.</xhtml:div>
+               </xhtml:div>
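+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample; the coordinate values are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <point>
+                           <gringLat>52.5200</gringLat>
+                           <gringLon>13.4050</gringLon>
+                        </point>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>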
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="polygonType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="point" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="polygon" type="polygonType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Polygon</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The minimum polygon that covers a geographical area, and is delimited by at least 4 points (3 sides), in which the last point coincides with the first point.</xhtml:div>
+               </xhtml:div>
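+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>An illustrative sample in which the last point repeats the first to close the polygon; the coordinate values are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <polygon>
+                           <point><gringLat>41.0</gringLat><gringLon>-74.0</gringLon></point>
+                           <point><gringLat>41.0</gringLat><gringLon>-73.0</gringLon></point>
+                           <point><gringLat>40.0</gringLat><gringLon>-73.0</gringLon></point>
+                           <point><gringLat>41.0</gringLat><gringLon>-74.0</gringLon></point>
+                        </polygon>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>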
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="postQTxt" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">PostQuestion Text</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Text describing what occurs after the literal question has been asked.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <postQTxt>The next set of questions will ask about your financial situation.</postQTxt> 
+                           </qstn>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="preQTxt" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">PreQuestion Text</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Text describing a set of conditions under which a question might be asked.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <preQTxt>For those who did not go away on a holiday of four days or more in 1985...</preQTxt>
+                           </qstn>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="ProcStat" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Processing Status</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Processing status of the file. Some data producers and social science data archives employ data processing strategies that provide for release of data and documentation at various stages of processing.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <ProcStat>Available from the DDA. Being processed.</ProcStat>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <ProcStat>The principal investigator notes that the data in Public Use Tape 5 are released prior to final cleaning and editing, in order to provide prompt access to the NMES data by the research and policy community.</ProcStat>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="prodDate" type="simpleTextAndDateType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Date of Production</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Date when the marked-up document/marked-up document source/data collection/other material(s) were produced (not distributed or archived). The ISO standard for dates (YYYY-MM-DD) is recommended for use with the date attribute. Production date for data collection (codeBook/stdyDscr/citation/prodStmt/prodDate) maps to Dublin Core Date element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <prodDate date="1999-01-25">January 25, 1999</prodDate>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="prodPlac" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Place of Production</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Address of the archive or organization that produced the work.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <prodPlac>Ann Arbor, MI: Inter-university Consortium for Political and Social Research</prodPlac>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="prodStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="producer" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="copyright" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="prodDate" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="prodPlac" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="software" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="fundAg" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="grantNo" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="prodStmt" type="prodStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Production Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Production statement for the work at the appropriate level: marked-up document; marked-up document source; study; study description, other material; other material for study.</xhtml:div>
+               </xhtml:div>
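+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: the child elements follow prodStmtType; the values shown are hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <prodStmt>
+                           <producer abbr="ICPSR">Inter-university Consortium for Political and Social Research</producer>
+                           <prodDate date="1999-01-25">January 25, 1999</prodDate>
+                           <prodPlac>Ann Arbor, MI</prodPlac>
+                        </prodStmt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>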
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="producerType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+            <xs:attribute name="affiliation" type="xs:string"/>
+            <xs:attribute name="role" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="producer" type="producerType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Producer</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The producer is the person or organization with the financial or administrative responsibility for the physical processes whereby the document was brought into existence. Use the "role" attribute to distinguish different stages of involvement in the production process, such as original producer. Producer of data collection (codeBook/stdyDscr/citation/prodStmt/producer) maps to Dublin Core Publisher element. The "producer" in the Document Description should be the agency or person that prepared the marked-up document.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <producer abbr="ICPSR" affiliation="Institute for Social Research">Inter-university Consortium for Political and Social Research</producer>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <producer abbr="MNPoll" affiliation="Minneapolis Star Tribune Newspaper"
+                                  role="original producer">Star Tribune Minnesota Poll</producer>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <producer abbr="MRDC" affiliation="University of Minnesota" role="final production">Machine Readable Data Center</producer>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="purposeType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS"/>
+            <xs:attribute name="pubrefs" type="xs:IDREFS"/>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="purpose" type="purposeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Purpose</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Explains the purpose for which a particular nCube was created.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <purpose>Meets reporting requirements for the Federal Reserve Board</purpose>
+                        </nCube>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="qstnType" mixed="true">
+      <xs:complexContent>
+         <xs:restriction base="abstractTextType">
+            <xs:sequence>
+               <xs:choice minOccurs="0" maxOccurs="unbounded">
+                  <xs:group ref="PHRASE"/>
+                  <xs:group ref="FORM"/>
+                  <xs:group ref="xhtml:BlkNoForm.mix"/>
+                  <xs:element ref="preQTxt"/>
+                  <xs:element ref="qstnLit"/>
+                  <xs:element ref="postQTxt"/>
+                  <xs:element ref="forward"/>
+                  <xs:element ref="backward"/>
+                  <xs:element ref="ivuInstr"/>
+               </xs:choice>               
+            </xs:sequence>
+            <xs:attribute name="qstn" type="xs:IDREF"/>
+            <xs:attribute name="var" type="xs:IDREFS"/>
+            <xs:attribute name="seqNo" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="responseDomainType" use="optional">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="text"/>
+                     <xs:enumeration value="numeric"/>
+                     <xs:enumeration value="code"/>
+                     <xs:enumeration value="category"/>
+                     <xs:enumeration value="datetime"/>
+                     <xs:enumeration value="geographic"/>
+                     <xs:enumeration value="multiple"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherResponseDomainType" type="xs:NMTOKEN" use="optional"/>
+         </xs:restriction>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="qstn" type="qstnType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Question</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The question element may have mixed content. The element itself may contain text for the question, with the subelements being used to provide further information about the question. Alternatively, the question element may be empty and only the subelements used. The element has a unique question ID attribute which can be used to link a variable with other variables where the same question has been asked. This would allow searching for all variables that share the same question ID, perhaps because the question was asked several times in a panel design. The "ID" attribute contains a unique identifier for the question. "Var" references the ID(s) of the variable(s) relating to the question. The attribute "seqNo" refers to the sequence number of the question. The attribute "sdatrefs" may be used to reference elements in the summary data description section of the Study Description which might apply to this question. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data. The responseDomainType attribute was added to capture the specific DDI 3 response domain type to facilitate translation between DDI 2 and DDI 3. If this is given a value of "other" then a term from a controlled vocabulary should be put into the "otherResponseDomainType" attribute.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn ID="Q125">When you get together with your friends, would you say you discuss political matters frequently, occasionally, or never?</qstn>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="qstnLitType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="qstnLit" type="qstnLitType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Literal Question</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Text of the actual, literal question asked.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <qstn>
+                              <qstnLit>Why didn't you go away in 1985?</qstnLit>
+                           </qstn>
+                        </var>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="rangeType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:attribute name="UNITS" default="INT">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="INT"/>
+                     <xs:enumeration value="REAL"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="min" type="xs:string"/>
+            <xs:attribute name="minExclusive" type="xs:string"/>
+            <xs:attribute name="max" type="xs:string"/>
+            <xs:attribute name="maxExclusive" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="range" type="rangeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Value Range</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This is the actual range of values. The "UNITS" attribute permits the specification of integer/real numbers. The "min" and "max" attributes specify the lowest and highest values that are part of the range. The "minExclusive" and "maxExclusive" attributes specify values that are immediately outside the range. This is an empty element consisting only of its attributes.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">For example, x &lt; 1 or 10 &lt;= x &lt; 20 would be expressed as:
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <range maxExclusive="1"/>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <range min="10" maxExclusive="20"/>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="recDimnsnType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="varQnty" minOccurs="0"/>
+               <xs:element ref="caseQnty" minOccurs="0"/>
+               <xs:element ref="logRecL" minOccurs="0"/>
+            </xs:sequence>
+            <xs:attribute name="level" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="recDimnsn" type="recDimnsnType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Dimensions (of record)</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information about the physical characteristics of the record. The "level" attribute on this element should be set to "record".</xhtml:div>
+               </xhtml:div>
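+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: subelements follow recDimnsnType and mirror the recGrp example below; the counts are hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <recDimnsn level="record">
+                           <varQnty>133</varQnty>
+                           <caseQnty>1500</caseQnty>
+                           <logRecL>852</logRecL>
+                        </recDimnsn>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>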
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="recGrpType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="recDimnsn" minOccurs="0"/>
+            </xs:sequence>
+            <xs:attribute name="recGrp" type="xs:IDREFS"/>
+            <xs:attribute name="rectype" type="xs:string"/>
+            <xs:attribute name="keyvar" type="xs:IDREFS"/>
+            <xs:attribute name="rtypeloc" type="xs:string"/>
+            <xs:attribute name="rtypewidth" type="xs:string" default="1"/>
+            <xs:attribute name="rtypevtype" default="numeric">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="numeric"/>
+                     <xs:enumeration value="character"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="recidvar" type="xs:string"/>
+
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="recGrp" type="recGrpType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Record or Record Group</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Used to describe record groupings if the file is hierarchical or relational. The attribute "recGrp" allows a record group to indicate subsidiary record groups which nest underneath; this allows for the encoding of a hierarchical structure of record groups. The attribute "rectype" indicates the type of record, e.g., "A records" or "Household records." The attribute "keyvar" is an IDREFS attribute that provides the link to other record types. In a hierarchical study consisting of individual and household records, the "keyvar" on the person record will indicate the household to which it belongs. The attribute "rtypeloc" indicates the starting column location of the record type indicator variable on each record of the data file. The attribute "rtypewidth" specifies the width of the record type indicator field, for files with many different record types. The attribute "rtypevtype" specifies the type of the indicator variable. The "recidvar" attribute indicates the variable that identifies the record group.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileStrc type="hierarchical"> 
+                           <recGrp rectype="Person" keyvar="HHDID">
+                              <labl>CPS 1999 Person-Level Record</labl>
+                              <recDimnsn>
+                                 <varQnty>133</varQnty>
+                                 <caseQnty>1500</caseQnty>
+                                 <logRecL>852</logRecL>
+                              </recDimnsn>
+                           </recGrp> 
+                        </fileStrc>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="recNumTot" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Overall Number of Records</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Overall record count in file. Particularly helpful in instances such as files with multiple cards/decks or records per case.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dimensns>
+                           <recNumTot>2400</recNumTot>
+                        </dimensns>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="recPrCas" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Records per Case</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Records per case in the file. This element should be used for card-image data or other files in which there are multiple records per case.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <dimensns>
+                           <recPrCas>5</recPrCas>
+                        </dimensns>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="relMatType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="materialReferenceType">
+            <xs:attribute name="callno" type="xs:string"/>
+            <xs:attribute name="label" type="xs:string"/>
+            <xs:attribute name="media" type="xs:string"/>
+            <xs:attribute name="type" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="relMat" type="relMatType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Related Materials</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Describes materials related to the study description, such as appendices, additional information on sampling found in other documents, etc. Can take the form of bibliographic citations. This element can contain either PCDATA or a citation or both, and there can be multiple occurrences of both the citation and PCDATA within a single element. May consist of a single URI or a series of URIs comprising a series of citations/references to external materials which can be objects as a whole (journal articles) or parts of objects (chapters or appendices in articles or documents).</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <relMat> Full details on the research design and procedures, sampling methodology, content areas, and questionnaire design, as well as percentage distributions by respondent's sex, race, region, college plans, and drug use, appear in the annual ISR volumes MONITORING THE FUTURE: QUESTIONNAIRE RESPONSES FROM THE NATION'S HIGH SCHOOL SENIORS.</relMat>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <relMat>Current Population Survey, March 1999: Technical Documentation includes an abstract, pertinent information about the file, a glossary, code lists, and a data dictionary. One copy accompanies each file order. When ordered separately, it is available from Marketing Services Office, Customer Service Center, Bureau of the Census, Washington, D.C. 20233.</relMat>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <relMat>A more precise explanation regarding the CPS sample design is provided in Technical Paper 40, The Current Population Survey: Design and Methodology. Chapter 5 of this paper provides documentation on the weighting procedures for the CPS both with and without supplement questions.</relMat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="relPubl" type="materialReferenceType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Related Publications</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Bibliographic and access information about articles and reports based on the data in this collection. Can take the form of bibliographic citations.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <relPubl>Economic Behavior Program Staff. SURVEYS OF CONSUMER FINANCES. Annual volumes 1960 through 1970. Ann Arbor, MI: Institute for Social Research.</relPubl>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <relPubl>Data from the March Current Population Survey are published most frequently in the Current Population Reports P-20 and P-60 series. These reports are available from the Superintendent of Documents, U.S. Government Printing Office, Washington, DC 20402. They also are available on the INTERNET at http://www.census.gov. Forthcoming reports will be cited in Census and You, the Monthly Product Announcement (MPA), and the Bureau of the Census Catalog and Guide.</relPubl>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+      
+   <xs:element name="relStdy" type="materialReferenceType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Related Studies</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information on the relationship of the current data collection to others (e.g., predecessors, successors, other waves or rounds) or to other editions of the same file. This would include the names of additional data collections generated from the same data collection vehicle plus other collections directed at the same general topic. Can take the form of bibliographic citations. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <relStdy>ICPSR distributes a companion study to this collection titled FEMALE LABOR FORCE PARTICIPATION AND MARITAL INSTABILITY, 1980: [UNITED STATES] (ICPSR 9199).</relStdy>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="resInstruType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="conceptualTextType">
+            <xs:attribute name="type" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="resInstru" type="resInstruType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Type of Research Instrument</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The type of data collection instrument used. "Structured" indicates an instrument in which all respondents are asked the same questions/tests, possibly with precoded answers. If a small portion of such a questionnaire includes open-ended questions, provide appropriate comments. "Semi-structured" indicates that the research instrument contains mainly open-ended questions. "Unstructured" indicates that in-depth interviews were conducted. The "type" attribute is included to permit the development of a controlled vocabulary for this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <resInstru>structured</resInstru>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="respRate" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Response Rate</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The percentage of sample members who provided information. This may include a broader description of stratified response rates, information affecting response rates, etc.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <respRate>For 1993, the estimated inclusion rate for TEDS-eligible providers was 91 percent, with the inclusion rate for all treatment providers estimated at 76 percent (including privately and publicly funded providers).</respRate>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <respRate>The overall response rate was 82%, although retail firms with an annual sales volume of more than $5,000,000 were somewhat less likely to respond.</respRate>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="respUnit" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Response Unit</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides information regarding who provided the information contained within the variable/nCube, e.g., respondent, proxy, interviewer. This element may be repeated only to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <respUnit>Head of household</respUnit>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <respUnit>Head of household</respUnit>
+                        </nCube>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="restrctn" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Restrictions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Any restrictions on access to or use of the collection such as privacy certification or distribution restrictions should be indicated here. These can be restrictions applied by the author, producer, or disseminator of the data collection. If the data are restricted to only a certain class of user, specify which type. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <restrctn> In preparing the data file(s) for this collection, the National Center for Health Statistics (NCHS) has removed direct identifiers and characteristics that might lead to identification of data subjects. As an additional precaution NCHS requires, under Section 308(d) of the Public Health Service Act (42 U.S.C. 242m), that data collected by NCHS not be used for any purpose other than statistical analysis and reporting. NCHS further requires that analysts not use the data to learn the identity of any persons or establishments and that the director of NCHS be notified if any identities are inadvertently discovered. ICPSR member institutions and other users ordering data from ICPSR are expected to adhere to these restrictions.</restrctn>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <restrctn> ICPSR obtained these data from the World Bank under the terms of a contract which states that the data are for the sole use of ICPSR and may not be sold or provided to third parties outside of ICPSR membership. Individuals at institutions that are not members of the ICPSR may obtain these data directly from the World Bank.</restrctn>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="rowType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="entry" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="rowsep" type="xs:string"/>
+            <xs:attribute name="valign">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="top"/>
+                     <xs:enumeration value="middle"/>
+                     <xs:enumeration value="bottom"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="row" type="rowType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Table Row</xhtml:h1>
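+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: a row contains one or more entry elements per rowType; the cell values are hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <row valign="top">
+                           <entry>Employed</entry>
+                           <entry>1,234</entry>
+                        </row>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>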
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="rspStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="AuthEnty" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="othId" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="rspStmt" type="rspStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Responsibility Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Responsibility for the creation of the work at the appropriate level: marked-up document; marked-up document source; study; study description, other material; other material for study.</xhtml:div>
+               </xhtml:div>
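+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: subelements follow rspStmtType; the content echoes the sourceCitation example elsewhere in this schema. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <rspStmt>
+                           <AuthEnty affiliation="U.S. Department of Commerce">United States Census Bureau</AuthEnty>
+                        </rspStmt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>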
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="sampProc" type="conceptualTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Sampling Procedure</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The type of sample and sample design used to select the survey respondents to represent the population. May include reference to the target sample size and the sampling fraction.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampProc>National multistage area probability sample</sampProc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampProc>Simple random sample</sampProc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampProc>Stratified random sample</sampProc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampProc>Quota sample</sampProc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampProc>The 8,450 women interviewed for the NSFG, Cycle IV, were drawn from households in which  someone had been interviewed for the National Health Interview Survey (NHIS), between October 1985 and March 1987.</sampProc>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sampProc>Samples sufficient to produce approximately 2,000 families with completed interviews were drawn in each state. Families containing one or more Medicaid or uninsured persons were oversampled. XHTML content may be used for formatting.</sampProc>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="security" type="simpleTextAndDateType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Security</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides information regarding levels of access, e.g., public, subscriber, need to know. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the date attribute.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <security date="1998-05-10"> This variable has been recoded for reasons of confidentiality. Users should contact the archive for information on obtaining access.</security>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <security date="1998-05-10">Variable(s) within this nCube have been recoded for reasons of confidentiality.  Users should contact the archive for information on obtaining access.</security>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="serInfo" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Series Information</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Contains a history of the series and a summary of those features that apply to the series as a whole.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <serInfo>The Current Population Survey (CPS) is a household sample survey conducted monthly by the Census Bureau to provide estimates of employment, unemployment, and other characteristics of the general labor force, estimates of the population as a whole, and estimates of various subgroups in the population. The entire non-institutionalized population of the United States is sampled to obtain the respondents for this survey series.</serInfo>
+                     ]]></xhtml:samp>
+                 </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="serNameType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="abbr" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="serName" type="serNameType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Series Name</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The name of the series to which the work belongs.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <serName abbr="CPS">Current Population Survey Series</serName>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="serStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="serName" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="serInfo" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="serStmt" type="serStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Series Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Series statement for the work at the appropriate level: marked-up document; marked-up document source; study; study description, other material; other material for study. The URI attribute is provided to point to a central Internet repository of series information. Repeat this field if the study is part of more than one series. Repetition of the internal content should be used to support multiple languages only.</xhtml:div>
+               </xhtml:div>
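+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: subelements follow serStmtType; the content is drawn from the serName and serInfo examples above, shortened. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <serStmt>
+                           <serName abbr="CPS">Current Population Survey Series</serName>
+                           <serInfo>The Current Population Survey (CPS) is a household sample survey conducted monthly by the Census Bureau.</serInfo>
+                        </serStmt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>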
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="setAvailType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="accsPlac" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="origArch" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="avlStatus" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="collSize" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="complete" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="fileQnty" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="media" type="xs:string"/>
+            <xs:attribute name="callno" type="xs:string"/>
+            <xs:attribute name="label" type="xs:string"/>
+            <xs:attribute name="type" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="setAvail" type="setAvailType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Data Set Availability</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information on availability and storage of the collection. The "media" attribute may be used in combination with any of the subelements. See Location of Data Collection.</xhtml:div>
+               </xhtml:div>
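+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: subelements follow setAvailType; the values shown are hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <setAvail media="online">
+                           <accsPlac>Ann Arbor, MI: Inter-university Consortium for Political and Social Research</accsPlac>
+                           <avlStatus>Available</avlStatus>
+                           <fileQnty>2 data files</fileQnty>
+                        </setAvail>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>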
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="softwareType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="version" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="software" type="softwareType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Software used in Production</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Software used to produce the work. A "version" attribute permits specification of the software version number. The "date" attribute is provided to enable specification of the date (if any) for the software release. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the date attribute.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <citation>
+                              <prodStmt>
+                                 <software version="1.0">MRDC Codebook Authoring Tool</software>
+                              </prodStmt>
+                           </citation>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <citation>
+                              <prodStmt>
+                                 <software version="8.0">Arbortext Adept Editor</software>
+                              </prodStmt>
+                           </citation>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <docDscr>
+                           <docSrc>
+                              <prodStmt>
+                                 <software version="4.0">PageMaker</software>
+                              </prodStmt>
+                           </docSrc>
+                        </docDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyDscr>
+                           <citation>
+                              <prodStmt>
+                                 <software version="6.12">SAS</software>
+                              </prodStmt>
+                           </citation>
+                        </stdyDscr>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <fileTxt>
+                           <software version="6.12">The SAS transport file was generated by the SAS CPORT procedure.</software>
+                        </fileTxt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="sourcesType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:choice>
+               <xs:sequence>
+                  <xs:element ref="dataSrc" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="sourceCitation" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="srcOrig" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="srcChar" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="srcDocu" minOccurs="0" maxOccurs="unbounded"/>
+                  <xs:element ref="sources" minOccurs="0" maxOccurs="unbounded"/>
+               </xs:sequence>
+            </xs:choice>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="sources" type="sourcesType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Sources Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Description of sources used for the data collection. The element is nestable so that the sources statement might encompass a series of discrete source statements, each of which could contain the facts about an individual source. This element maps to Dublin Core Source element.</xhtml:div>
+               </xhtml:div>
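+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: subelements follow sourcesType; the content is hypothetical. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sources>
+                           <dataSrc>Manuscript census schedules, 1880</dataSrc>
+                           <srcOrig>Original enumeration forms held by the national archive</srcOrig>
+                           <srcChar>Handwritten schedules of varying legibility</srcChar>
+                        </sources>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>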
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="sourceCitation" type="citationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Source Citation</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This complex element allows the inclusion of a standard citation for the sources used in collecting and creating the dataset.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <sourceCitation>
+                           <titlStmt>
+                              <titl>Tenth Decennial Census of the United States, 1880. Volume I. Statistics of the Population of the United States at the Tenth Census.</titl>
+                           </titlStmt>
+                           <rspStmt>
+                              <AuthEnty affiliation="U.S. Department of Commerce">United States Census Bureau</AuthEnty>
+                           </rspStmt>
+                           <prodStmt>
+                              <producer>Government Printing Office</producer>
+                              <prodDate>1883</prodDate>
+                           </prodStmt>
+                        </sourceCitation>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="southBL" type="phraseType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">South Bounding Latitude</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The southernmost coordinate delimiting the geographic extent of the dataset. A valid range of values, expressed in decimal degrees (positive east and positive north), is: -90.0 &lt;= South Bounding Latitude Value &lt;= 90.0; South Bounding Latitude Value &lt;= North Bounding Latitude Value.</xhtml:div>
+               </xhtml:div>
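+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <!-- Illustrative sketch only: a hypothetical value in decimal degrees within the valid range. -->
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <southBL>24.50</southBL>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>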
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="specPermType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="required" default="yes">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="yes"/>
+                     <xs:enumeration value="no"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="formNo" type="xs:string"/>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="specPerm" type="specPermType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Special Permissions</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This element is used to determine if any special permissions are required to access a resource. The "required" attribute is used to aid machine processing of this element, and the default specification is "yes". The "formNo" attribute indicates the number or ID of the form that the user must fill out. The "URI" attribute may be used to provide a URN or URL for online access to a special permissions form.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <specPerm formNo="4">The user must apply for special permission to use this dataset locally and must complete a confidentiality form.</specPerm>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="srcChar" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Characteristics of Source Noted</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Assessment of characteristics and quality of source material. May not be relevant to survey data. This element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
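+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example of a source-quality assessment; the wording is illustrative only:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <srcChar>Handwritten registers of varying legibility; several volumes are incomplete.</srcChar>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>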
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="srcDocu" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Documentation and Access to Sources</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Level of documentation of the original sources. May not be relevant to survey data. This element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
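+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example; the documentation arrangement described is illustrative only:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <srcDocu>Original source tabulations are fully documented in a procedural report held by the producing agency.</srcDocu>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>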
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="srcOrig" type="conceptualTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Origins of Sources</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">For historical materials, information about the origin(s) of the sources and the rules followed in establishing the sources should be specified. May not be relevant to survey data. This element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
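+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example of describing source origins; the details are illustrative only:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <srcOrig>Sources were drawn from the complete run of published annual reports; editions with incomplete coverage were excluded.</srcOrig>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>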
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="stdCatgryType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="stdCatgry" type="stdCatgryType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Standard Categories</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Standard category codes used in the variable, like industry codes, employment codes, or social class codes. The attribute "date" is provided to indicate the version of the code in place at the time of the study. The attribute "URI" is provided to indicate a URN or URL that can be used to obtain an electronic list of the category codes.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <stdCatgry date="1981" source="producer">U. S. Census of Population and Housing, Classified Index of Industries and Occupations </stdCatgry>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="stdyClasType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="stdyClas" type="stdyClasType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Class of the Study</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Generally used to give the data archive's class or study status number, which indicates the processing status of the study. May also be used as a text field to describe processing status. This element may be repeated to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyClas>ICPSR Class II</stdyClas>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyClas>DDA Class C</stdyClas>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyClas>Available from the DDA. Being processed. </stdyClas>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="stdyDscrType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="citation" maxOccurs="unbounded"/>
+               <xs:element ref="studyAuthorization" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="stdyInfo" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="studyDevelopment" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="method" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataAccs" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="othrStdyMat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="access" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="stdyDscr" type="stdyDscrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Study Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The Study Description consists of information about the data collection, study, or compilation that the DDI-compliant documentation file describes. This section includes information about how the study should be cited, who collected or compiled the data, who distributes the data, keywords about the content of the data, summary (abstract) of the content of the data, data collection methods and processing, etc. Note that some content of the Study Description's Citation -- e.g., Responsibility Statement -- may be identical to that of the Documentation Citation. This is usually the case when the producer of a data collection also produced the print or electronic codebook for that data collection.</xhtml:div>
+               </xhtml:div>
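+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A minimal structural sketch showing the order of the major child elements; the content is elided and purely illustrative:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyDscr>
+                           <citation>...</citation>
+                           <stdyInfo>...</stdyInfo>
+                           <method>...</method>
+                           <dataAccs>...</dataAccs>
+                           <othrStdyMat>...</othrStdyMat>
+                        </stdyDscr>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>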
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="studyDevelopmentType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="developmentActivity" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="studyDevelopment" type="studyDevelopmentType">
+      <xs:annotation>
+         <xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Study Development</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Describe the process of study development as a series of development activities. These activities can be typed using a controlled vocabulary. Describe the activity, listing participants with their role and affiliation, resources used (sources of information), and the outcome of the development activity.</xhtml:div>
+					</xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>This allows you to capture a number of development activities on separate entry screens, storing each as a developmentActivity tagged via its type attribute. For example, if there were an activity related to data availability, the developmentActivity might be as follows:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+								<developmentActivity type="checkDataAvailability">
+									<description>A number of potential sources were evaluated for content, consistency and quality</description>
+									<participant affiliation="NSO" role="statistician">John Doe</participant>
+									<resource>
+										<dataSrc>Study S</dataSrc>
+										<srcOrig>Collected in 1970 using unknown sampling method</srcOrig>
+										<srcChar>Information incomplete missing X province</srcChar>
+									</resource>
+									<outcome>Due to quality issues this was determined not to be a viable source of data for the study</outcome>
+								</developmentActivity>
+                     ]]></xhtml:samp>
+                     <xhtml:p>This generic structure would allow you to designate additional design activities etc.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="developmentActivityType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element name="description" type="simpleTextType" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="participant" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="resource" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element name="outcome" type="simpleTextType" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="developmentActivity" type="developmentActivityType"/>
+
+   <xs:complexType name="participantType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="affiliation" type="xs:string" use="optional"/>
+            <xs:attribute name="abbr" type="xs:string" use="optional"/>
+            <xs:attribute name="role" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+
+   <xs:element name="participant" type="participantType"/>
+
+   <xs:complexType name="resourceType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element name="dataSrc" type="simpleTextType" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="srcOrig" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="srcChar" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="srcDocu" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="resource" type="resourceType"/>
+
+   <xs:complexType name="studyAuthorizationType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="authorizingAgency" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="authorizationStatement" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="date" type="dateSimpleType" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="studyAuthorization" type="studyAuthorizationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Study Authorization</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Provides structured information on the agency that authorized the study, the date of authorization, and an authorization statement.</xhtml:div>
+               </xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<studyAuthorization date="2010-11-04">
+<authorizingAgency affiliation="University of Georgia" abbr="HSO">Human Subjects Office</authorizingAgency>
+<authorizationStatement>Statement of authorization issued by HSO on 2010-11-04</authorizationStatement>
+</studyAuthorization>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="authorizingAgencyType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="affiliation" type="xs:string" use="optional"/>
+            <xs:attribute name="abbr" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <xs:element name="authorizingAgency" type="authorizingAgencyType">
+      <xs:annotation>
+				<xs:documentation>
+					<xhtml:div>
+						<xhtml:h1 class="element_title">Authorizing Agency</xhtml:h1>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Description</xhtml:h2>
+							<xhtml:div class="description">Name of the agent or agency that authorized the study. The "affiliation" attribute indicates the institutional affiliation of the authorizing agent or agency. The "abbr" attribute holds the abbreviation of the authorizing agent's or agency's name.</xhtml:div>
+						</xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<authorizingAgency affiliation="Purdue University" abbr="OUHS">Office for Use of Human Subjects</authorizingAgency>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+					</xhtml:div>
+				</xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="authorizationStatement" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Authorization Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The text of the authorization. Use XHTML to capture significant structure in the document.</xhtml:div>
+               </xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<authorizationStatement>Required documentation covering the study purpose, disclosure information, questionnaire content, and consent statements was delivered to the OUHS on 2010-10-01 and was reviewed by the compliance officer. Statement of authorization for the described study was issued on 2010-11-04</authorizationStatement>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="stdyInfoType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="studyBudget" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="subject" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="abstract" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="sumDscr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="qualityStatement" minOccurs="0"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="exPostEvaluation" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="stdyInfo" type="stdyInfoType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Study Scope</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This section contains information about the data collection's scope across several dimensions, including substantive content, geography, and time.</xhtml:div>
+               </xhtml:div>
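+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A minimal structural sketch of the Study Scope section; the content is elided and purely illustrative:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <stdyInfo>
+                           <subject>...</subject>
+                           <abstract>...</abstract>
+                           <sumDscr>...</sumDscr>
+                        </stdyInfo>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>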
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="qualityStatementType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="standardsCompliance" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element name="otherQualityStatement" type="simpleTextType" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="qualityStatement" type="qualityStatementType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Quality Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">This structure consists of two parts, standardsCompliance and otherQualityStatement. In standardsCompliance, list all specific standards complied with during the execution of this study. Note the standard name and producer, and how the study complied with the standard. Enter any additional quality statements in otherQualityStatement.</xhtml:div>
+               </xhtml:div>
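+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example combining both parts; the additional quality statement is illustrative only:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+<qualityStatement>
+<standardsCompliance>
+<standard>
+<standardName>Data Documentation Initiative</standardName>
+<producer>DDI Alliance</producer>
+</standard>
+<complianceDescription>Study metadata was created in compliance with the Data Documentation Initiative (DDI) standard</complianceDescription>
+</standardsCompliance>
+<otherQualityStatement>Interviewer training and fieldwork followed the agency's written quality guidelines.</otherQualityStatement>
+</qualityStatement>
+]]>
+                     </xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>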
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="standardsComplianceType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="standard"/>
+               <xs:element name="complianceDescription" type="simpleTextType" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="standardsCompliance" type="standardsComplianceType">
+			<xs:annotation>
+				<xs:documentation>
+					<xhtml:div>
+						<xhtml:h1 class="element_title">Standards Compliance</xhtml:h1>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Description</xhtml:h2>
+							<xhtml:div class="description">This section lists all specific standards complied with during the execution of this study. Specify the standard(s)' name(s) and producer(s) and describe how the study complied with each standard in complianceDescription. Enter any additional quality statements in otherQualityStatement.</xhtml:div>
+						</xhtml:div>
+						<xhtml:div>
+							<xhtml:h2 class="section_header">Example</xhtml:h2>
+							<xhtml:div class="example">
+								<xhtml:samp class="xml_sample"><![CDATA[
+<standardsCompliance><standard>
+<standardName>Data Documentation Initiative</standardName>
+<producer>DDI Alliance</producer></standard>
+<complianceDescription>Study metadata was created in compliance with the Data Documentation Initiative (DDI) standard</complianceDescription>
+</standardsCompliance>
+]]>
+								</xhtml:samp>
+							</xhtml:div>
+						</xhtml:div>
+					</xhtml:div>
+				</xs:documentation>
+			</xs:annotation>
+		</xs:element>
+
+
+   <xs:complexType name="standardType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="standardName" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="producer" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="standard" type="standardType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Standard</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Describes a standard with which the study complies.</xhtml:div>
+					</xhtml:div>
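+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:p>An illustrative example pairing a standard name with its producer:</xhtml:p>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<standard>
+<standardName date="2009-10-18" version="3.1" URI="http://www.ddialliance.org/Specification/DDI-Lifecycle/3.1/">Data Documentation Initiative</standardName>
+<producer>DDI Alliance</producer>
+</standard>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>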
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+
+   <xs:complexType name="standardNameType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="date" type="dateSimpleType" use="optional"/>
+            <xs:attribute name="version" type="xs:string" use="optional"/>
+            <xs:attribute name="URI" type="xs:anyURI" use="optional"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+
+   <xs:element name="standardName" type="standardNameType">
+		<xs:annotation>
+			<xs:documentation>
+				<xhtml:div>
+					<xhtml:h1 class="element_title">Standard Name</xhtml:h1>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Description</xhtml:h2>
+						<xhtml:div class="description">Contains the name of the standard with which the study complies. The "date" attribute specifies the date when the standard was published, the "version" attribute includes the specific version of the standard with which the study is compliant, and the "URI" attribute includes the URI for the actual standard.</xhtml:div>
+					</xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<standardName date="2009-10-18" version="3.1" URI="http://www.ddialliance.org/Specification/DDI-Lifecycle/3.1/">Data Documentation Initiative</standardName>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+				</xhtml:div>
+			</xs:documentation>
+		</xs:annotation>
+	</xs:element>
+
+   <xs:complexType name="exPostEvaluationType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="evaluator" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="evaluationProcess" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="outcomes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="completionDate" type="dateSimpleType" use="optional"/>
+            <xs:attribute name="type" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="exPostEvaluation" type="exPostEvaluationType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Post Evaluation Procedures</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Use this section to describe evaluation procedures not addressed in data evaluation processes. These may include issues such as the timing of the study, sequencing, cost/budget, relevance, and institutional or legal arrangements. The completionDate attribute holds the date the evaluation was completed. The optional type attribute identifies the type of evaluation, with or without the use of a controlled vocabulary.</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<exPostEvaluation completionDate="2003" type="comprehensive">
+<evaluator affiliation="United Nations" abbr="UNSD" role="consultant">United Nations Statistical Division</evaluator>
+<evaluationProcess>In-depth review of pre-collection and collection procedures</evaluationProcess>
+<outcomes>The following steps were highly effective in increasing response rates, and should be repeated in the next collection cycle...</outcomes>
+</exPostEvaluation>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="evaluatorType">
+      <xs:simpleContent>
+         <xs:extension base="stringType">
+            <xs:attribute name="affiliation" type="xs:string" use="optional"/>
+            <xs:attribute name="abbr" type="xs:string" use="optional"/>
+            <xs:attribute name="role" type="xs:string" use="optional"/>
+         </xs:extension>
+      </xs:simpleContent>
+   </xs:complexType>
+   
+   <xs:element name="evaluator" type="evaluatorType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Evaluator</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The evaluator element identifies persons or organizations involved in the evaluation. The affiliation attribute contains the affiliation of the individual or organization. The abbr attribute holds an abbreviation for the individual or organization. The role attribute indicates the role played by the individual or organization in the evaluation process.</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<evaluator affiliation="United Nations" abbr="UNSD" role="consultant">United Nations Statistical Division</evaluator>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="evaluationProcess" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Evaluation Process</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Describes the evaluation process followed.</xhtml:div>
+               </xhtml:div>
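+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>For example, as used within exPostEvaluation:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+<evaluationProcess>In-depth review of pre-collection and collection procedures</evaluationProcess>
+]]>
+                     </xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>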
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="outcomes" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Evaluation Outcomes</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Describe the outcomes of the evaluation.</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<outcomes>The following steps were highly effective in increasing response rates, and should be repeated in the next collection cycle...</outcomes>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+             </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="studyBudget" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Study Budget</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Describe the budget of the project in as much detail as needed. Use XHTML structure elements to identify discrete pieces of information in a way that facilitates direct transfer of information on the study budget between DDI 2 and DDI 3 structures.</xhtml:div>
+               </xhtml:div>
+					<xhtml:div>
+						<xhtml:h2 class="section_header">Example</xhtml:h2>
+						<xhtml:div class="example">
+							<xhtml:samp class="xml_sample"><![CDATA[
+<studyBudget>The budget for the study covers a 5 year award period distributed between direct and indirect costs including: Staff, ...</studyBudget>
+]]>
+							</xhtml:samp>
+						</xhtml:div>
+					</xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:element name="subTitl" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Subtitle</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">A secondary title used to amplify or state certain limitations on the main title. It may repeat information already in the main title. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <titl>Monitoring the Future: A Continuing Study of American Youth, 1995</titl>
+                     ]]></xhtml:samp> 
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <subTitl>A Continuing Study of American Youth, 1995</subTitl>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <titl>Census of Population, 1950 [United States]: Public Use Microdata Sample</titl>
+                     ]]></xhtml:samp> 
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <subTitl>Public Use Microdata Sample</subTitl>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="subjectType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="keyword" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="topcClas" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="subject" type="subjectType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Subject Information</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Subject information describing the data collection's intellectual content.</xhtml:div>
+               </xhtml:div>
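+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example; the keyword value is illustrative, and the topic classification mirrors the topcClas example elsewhere in this schema:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+<subject>
+<keyword>public opinion</keyword>
+<topcClas vocab="LOC Subject Headings" vocabURI="http://www.loc.gov/catdir/cpso/lcco/lcco.html">Public opinion -- California -- Statistics</topcClas>
+</subject>
+]]>
+                     </xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>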
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="sumDscrType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="timePrd" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="collDate" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="nation" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="geogCover" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="geogUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="geoBndBox" minOccurs="0"/>
+               <xs:element ref="boundPoly" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="anlyUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="dataKind" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="sumDscr" type="sumDscrType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Summary Data Description</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information about the time period and geographic coverage of the study and the unit of analysis.</xhtml:div>
+               </xhtml:div>
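+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example; the time period values mirror the timePrd example elsewhere in this schema, and the remaining values are illustrative only:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+<sumDscr>
+<timePrd event="start" date="1998-05-01">May 1, 1998</timePrd>
+<timePrd event="end" date="1998-05-31">May 31, 1998</timePrd>
+<nation>United States</nation>
+<geogCover>Omaha, Nebraska</geogCover>
+<anlyUnit>Individuals</anlyUnit>
+<universe>Adults aged 18 and over residing in the covered area</universe>
+<dataKind>survey data</dataKind>
+</sumDscr>
+]]>
+                     </xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>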
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="sumStatType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="wgtd" default="not-wgtd">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="wgtd"/>
+                     <xs:enumeration value="not-wgtd"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="wgt-var" type="xs:IDREFS"/>
+            <xs:attribute name="weight" type="xs:IDREFS"/>
+            <xs:attribute name="type" use="required">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="mean"/>
+                     <xs:enumeration value="medn"/>
+                     <xs:enumeration value="mode"/>
+                     <xs:enumeration value="vald"/>
+                     <xs:enumeration value="invd"/>
+                     <xs:enumeration value="min"/>
+                     <xs:enumeration value="max"/>
+                     <xs:enumeration value="stdev"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherType" type="xs:NMTOKEN" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="sumStat" type="sumStatType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Summary Statistics</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>One or more statistical measures that describe the responses to a particular variable and may include one or more standard summaries, e.g., minimum and maximum values, median, mode, etc. The attribute "wgtd" indicates whether the statistics are weighted or not. The "weight" attribute is an IDREF(S) to the weight element(s) in the study description.</xhtml:p>
+                     <xhtml:p>The attribute "type" denotes the type of statistics being shown: mean, median, mode, valid cases, invalid cases, minimum, maximum, or standard deviation. If a value of "other" is used here, a value taken from a controlled vocabulary should be put in the "otherType" attribute. This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <sumStat type="min">0</sumStat>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <sumStat type="max">9</sumStat>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <sumStat type="medn">4</sumStat>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="tableType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="titl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="tgroup" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="frame">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="top"/>
+                     <xs:enumeration value="bottom"/>
+                     <xs:enumeration value="topbot"/>
+                     <xs:enumeration value="all"/>
+                     <xs:enumeration value="sides"/>
+                     <xs:enumeration value="none"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="colsep" type="xs:string"/>
+            <xs:attribute name="rowsep" type="xs:string"/>
+            <xs:attribute name="pgwide" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="table" type="tableType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Table</xhtml:h1>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="tbodyType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="row" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="valign">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="top"/>
+                     <xs:enumeration value="middle"/>
+                     <xs:enumeration value="bottom"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="tbody" type="tbodyType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Table Body</xhtml:h1>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="tgroupType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="colspec" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="thead" minOccurs="0"/>
+               <xs:element ref="tbody"/>
+            </xs:sequence>
+            <xs:attribute name="cols" type="xs:string" use="required"/>
+            <xs:attribute name="colsep" type="xs:string"/>
+            <xs:attribute name="rowsep" type="xs:string"/>
+            <xs:attribute name="align">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="left"/>
+                     <xs:enumeration value="right"/>
+                     <xs:enumeration value="center"/>
+                     <xs:enumeration value="justify"/>
+                     <xs:enumeration value="char"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="tgroup" type="tgroupType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Table Group</xhtml:h1>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="theadType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="row" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="valign">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="top"/>
+                     <xs:enumeration value="middle"/>
+                     <xs:enumeration value="bottom"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="thead" type="theadType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Table Head</xhtml:h1>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="timeMethType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="conceptualTextType">
+            <xs:attribute name="method" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="timeMeth" type="timeMethType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Time Method</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The time method or time dimension of the data collection. The "method" attribute is included to permit the development of a controlled vocabulary for this element. For forward-compatibility, DDI 3 XHTML tags may be used in this element.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <timeMeth>panel survey</timeMeth>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <timeMeth>cross-section</timeMeth>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <timeMeth>trend study</timeMeth>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <timeMeth>time-series</timeMeth>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="timePrdType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="event" default="single">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="start"/>
+                     <xs:enumeration value="end"/>
+                     <xs:enumeration value="single"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="cycle" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="timePrd" type="timePrdType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Time Period Covered</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The time period to which the data refer. This item reflects the time period covered by the data, not the dates of coding or making documents machine-readable or the dates the data were collected. Also known as span. Use the event attribute to specify "start", "end", or "single" for each date entered. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the "date" attribute. The "cycle" attribute permits specification of the relevant cycle, wave, or round of data. Maps to Dublin Core Coverage element. Inclusion of this element is recommended. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <timePrd event="start" date="1998-05-01">May 1, 1998</timePrd>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <timePrd event="end" date="1998-05-31">May 31, 1998</timePrd>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="titl" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Title</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Full authoritative title for the work at the appropriate level: marked-up document; marked-up document source; study; other material(s) related to study description; other material(s) related to study. The study title will in most cases be identical to the title for the marked-up document. A full title should indicate the geographic scope of the data collection as well as the time period covered. Title of data collection (codeBook/stdyDscr/citation/titlStmt/titl) maps to Dublin Core Title element. This element is required in the Study Description citation. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <titl>Domestic Violence Experience in Omaha, Nebraska, 1986-1987</titl>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <titl>Census of Population, 1950 [United States]: Public Use Microdata Sample</titl>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <titl>Monitoring the Future: A Continuing Study of American Youth, 1995</titl>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="titlStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="titl"/>
+               <xs:element ref="subTitl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="altTitl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="parTitl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="IDNo" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="titlStmt" type="titlStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Title Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Title statement for the work at the appropriate level: marked-up document; marked-up document source; study; study description, other materials; other materials for study.</xhtml:div>
+               </xhtml:div>
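+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:p>A hypothetical example; the title mirrors the titl example elsewhere in this schema, and the identifier value is illustrative only:</xhtml:p>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+<titlStmt>
+<titl>Domestic Violence Experience in Omaha, Nebraska, 1986-1987</titl>
+<IDNo>0000-0001</IDNo>
+</titlStmt>
+]]>
+                     </xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>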
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="topcClasType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="vocab" type="xs:string"/>
+            <xs:attribute name="vocabURI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="topcClas" type="topcClasType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Topic Classification</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The classification field indicates the broad substantive topic(s) that the data cover. Library of Congress subject terms may be used here. The "vocab" attribute is provided for specification of the controlled vocabulary in use, e.g., LCSH, MeSH, etc. The "vocabURI" attribute specifies the location for the full controlled vocabulary. Maps to Dublin Core Subject element. Inclusion of this element in the codebook is recommended.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <topcClas vocab="LOC Subject Headings" vocabURI="http://www.loc.gov/catdir/cpso/lcco/lcco.html">Public opinion -- California -- Statistics</topcClas>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <topcClas vocab="LOC Subject Headings" vocabURI="http://www.loc.gov/catdir/cpso/lcco/lcco.html">Elections -- California</topcClas>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="TotlResp" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Total Responses</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The number of responses to this variable. This element might be used if the number of responses does not match added case counts. It may also be used to sum the frequencies for variable categories.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <TotlResp>1,056</TotlResp>
+                        </var>
+                     ]]></xhtml:samp>
+                  <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <TotlResp>There are only 725 responses to this question since it was not asked in Tanzania.</TotlResp>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="txtType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="tableAndTextType">
+            <xs:attribute name="level" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="txt" type="txtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Descriptive Text</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Lengthier description of the parent element. The attribute "level" indicates the level to which the element applies. The attribute "sdatrefs" allows pointing to specific dates, universes, or other information encoded in the study description.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varGrp type="subject">
+                           <txt>The following five variables refer to respondent attitudes toward national environmental policies: air pollution, urban sprawl, noise abatement, carbon dioxide emissions, and nuclear waste.</txt>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCubeGrp type="subject">
+                           <txt>The following four nCubes are grouped to present a cross tabulation of the variables Sex, Work experience in 1999, and Income in 1999.</txt>
+                        </nCubeGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <txt>Total population for the agency for the year reported.</txt>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgryGrp>
+                           <txt>When the respondent indicated his political party preference, his response was coded on a scale of 1-99, with parties with a left-wing orientation coded on the low end of the scale and parties with a right-wing orientation coded on the high end of the scale. Categories 90-99 were reserved for miscellaneous responses.</txt>
+                        </catgryGrp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <catgry>
+                           <txt>Inap., question not asked in Ireland, Northern Ireland, and Luxembourg.</txt>
+                        </catgry>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <txt>Detailed poverty status for age cohorts over a period of five years, to be used in determining program eligibility</txt>
+                        </nCube>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <otherMat URI="http://www.icpsr.umich.edu/..">
+                           <txt>This is a PDF version of the original questionnaire provided by the principal investigator.</txt>
+                        </otherMat>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <otherMat>
+                           <txt>Glossary of Terms. Below are terms that may prove useful in working with the technical documentation for this study.</txt>
+                        </otherMat>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <otherMat>
+                           <txt>This is a PDF version of the original questionnaire provided by the principal investigator.</txt>
+                        </otherMat>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="undocCod" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">List of Undocumented Codes</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Values whose meaning is unknown.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <undocCod>Responses for categories 9 and 10 are unavailable.</undocCod>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="universeType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="conceptualTextType">
+            <xs:attribute name="level" type="xs:string"/>
+            <xs:attribute name="clusion" default="I">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="I"/>
+                     <xs:enumeration value="E"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="universe" type="universeType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Universe</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The group of persons or other elements that are the object of research and to which any analytic results refer. Age, nationality, and residence commonly help to delineate a given universe, but any of a number of factors may be involved, such as sex, race, income, veteran status, criminal convictions, etc. The universe may consist of elements other than persons, such as housing units, court cases, deaths, countries, etc. In general, it should be possible to tell from the description of the universe whether a given individual or element (hypothetical or real) is a member of the population under study. A "level" attribute is included to permit coding of the level to which the universe applies, i.e., the study level, the file level (if different from study), the record group, the variable group, the nCube group, the variable, or the nCube level. The "clusion" attribute provides for specification of groups included (I) in or excluded (E) from the universe. If all the variables/nCubes described in the data documentation relate to the same population, e.g., the same set of survey respondents, this element would be unnecessary at the data description level. In this case, the universe can be fully described at the study level. For forward-compatibility, DDI 3 XHTML tags may be used in this element. This element may be repeated only to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <universe clusion="I">Individuals 15-19 years of age. </universe>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <universe clusion="E">Individuals younger than 15 and older than 19 years of age.</universe>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="useStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="confDec" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="specPerm" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="restrctn" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="contact" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="citReq" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="deposReq" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="conditions" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="disclaimer" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="useStmt" type="useStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Use Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Information on terms of use for the data collection. This element may be repeated only to support multiple language expressions of the content.</xhtml:div>
+               </xhtml:div>
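+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>Illustrative sketch only: the child elements shown are drawn from the useStmtType content model defined above, and all text values are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <!-- Hypothetical example; the wording of the terms of use is illustrative -->
+                        <useStmt>
+                           <citReq>Publications based on these data must cite the original data producer.</citReq>
+                           <conditions>Use of these data requires prior registration with the archive.</conditions>
+                           <disclaimer>The depositor bears no responsibility for analyses or interpretations presented here.</disclaimer>
+                        </useStmt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>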
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="valrngType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:choice minOccurs="1" maxOccurs="unbounded">
+                  <xs:element ref="item"/>
+                  <xs:element ref="range"/>
+               </xs:choice>
+               <xs:element ref="key" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="valrng" type="valrngType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Range of Valid Data Values</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Values for a particular variable that represent legitimate responses.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <valrng>
+                           <range min="1" max="3"/>
+                        </valrng>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <valrng>
+                           <item VALUE="1"/>
+                           <item VALUE="2"/>
+                           <item VALUE="3"/>
+                        </valrng>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="varType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="location" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="imputation" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="security" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="embargo" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="respUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="anlysUnit" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="qstn" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="valrng" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="invalrng" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="undocCod" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="TotlResp" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="sumStat" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="stdCatgry" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="catgryGrp" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="catgry" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="codInstr" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="verStmt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="concept" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="derivation" minOccurs="0"/>
+               <xs:element ref="varFormat" minOccurs="0"/>
+               <xs:element ref="geoMap" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="catLevel" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="name" type="xs:string" use="required"/>
+            <xs:attribute name="wgt" default="not-wgt">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="wgt"/>
+                     <xs:enumeration value="not-wgt"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="wgt-var" type="xs:IDREFS"/>
+            <xs:attribute name="weight" type="xs:IDREFS"/>
+            <xs:attribute name="qstn" type="xs:IDREFS"/>
+            <xs:attribute name="files" type="xs:IDREFS"/>
+            <xs:attribute name="vendor" type="xs:string"/>
+            <xs:attribute name="dcml" type="xs:string"/>
+            <xs:attribute name="intrvl" default="discrete">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="contin"/>
+                     <xs:enumeration value="discrete"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="rectype" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS"/>
+            <xs:attribute name="pubrefs" type="xs:IDREFS"/>
+            <xs:attribute name="access" type="xs:IDREFS"/>
+            <xs:attribute name="aggrMeth">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="sum"/>
+                     <xs:enumeration value="average"/>
+                     <xs:enumeration value="count"/>
+                     <xs:enumeration value="mode"/>
+                     <xs:enumeration value="median"/>
+                     <xs:enumeration value="maximum"/>
+                     <xs:enumeration value="minimum"/>
+                     <xs:enumeration value="percent"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherAggrMeth" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="measUnit" type="xs:string"/>
+            <xs:attribute name="scale" type="xs:string"/>
+            <xs:attribute name="origin" type="xs:string"/>
+            <xs:attribute name="nature">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="nominal"/>
+                     <xs:enumeration value="ordinal"/>
+                     <xs:enumeration value="interval"/>
+                     <xs:enumeration value="ratio"/>
+                     <xs:enumeration value="percent"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="additivity">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="stock"/>
+                     <xs:enumeration value="flow"/>
+                     <xs:enumeration value="non-additive"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherAdditivity" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="temporal" default="N">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="Y"/>
+                     <xs:enumeration value="N"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="geog" default="N">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="Y"/>
+                     <xs:enumeration value="N"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="geoVocab" type="xs:string"/>
+            <xs:attribute name="catQnty" type="xs:string"/>
+            <xs:attribute name="representationType">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="text"/>
+                     <xs:enumeration value="numeric"/>
+                     <xs:enumeration value="code"/>
+                     <xs:enumeration value="datetime"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherRepresentationType" type="xs:NMTOKEN" use="optional"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="var" type="varType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Variable</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>This element describes all of the features of a single variable in a social science data file. The following elements are repeatable to support multi-language content: anlysUnit, embargo, imputation, respUnit, security, TotlResp. It includes the following attributes: </xhtml:p>
+                     <xhtml:p>The attribute "name" usually contains the so-called "short label" for the variable, limited to eight characters in many statistical analysis systems such as SAS or SPSS. </xhtml:p>
+                     <xhtml:p>The attribute "wgt" indicates whether the variable is a weight. </xhtml:p>
+                     <xhtml:p>The attribute "wgt-var" references the weight variable(s) for this variable. </xhtml:p>
+                     <xhtml:p>The attribute "qstn" is a reference to the question ID for the variable. </xhtml:p>
+                     <xhtml:p>The attribute "files" is the IDREF identifying the file(s) to which the variable belongs. </xhtml:p>
+                     <xhtml:p>The attribute "vendor" is the origin of the proprietary format and includes SAS, SPSS, ANSI, and ISO. </xhtml:p>
+                     <xhtml:p>The attribute "dcml" refers to the number of decimal points in the variable. </xhtml:p>
+                     <xhtml:p>The attribute "intrvl" indicates the interval type; options are discrete or continuous.</xhtml:p>
+                     <xhtml:p>The "rectype" attribute refers to the record type to which the variable belongs. </xhtml:p>
+                     <xhtml:p>The "sdatrefs" are summary data description references which record the ID values of all elements within the summary data description section of the Study Description which might apply to the variable. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data. </xhtml:p>
+                     <xhtml:p>The "methrefs" are methodology and processing references which record the ID values of all elements within the study methodology and processing section of the Study Description which might apply to the variable. These elements include information on data collection and data appraisal (e.g., sampling, sources, weighting, data cleaning, response rates, and sampling error estimates). </xhtml:p>
+                     <xhtml:p>The "pubrefs" attribute provides a link to publication/citation references and records the ID values of all citations elements within Other Study Description Materials or Other Study-Related Materials that pertain to this variable. </xhtml:p>
+                     <xhtml:p>The attribute "access" records the ID values of all elements in the Data Access section that describe access conditions for this variable. </xhtml:p>
+                     <xhtml:p>The "aggrMeth" attribute indicates the type of aggregation method used, for example 'sum', 'average', 'count'. If a value of "other" is given, a term from a controlled vocabulary should be used in the "otherAggrMeth" attribute.</xhtml:p>
+                     <xhtml:p>The "otherAggrMeth" attribute holds a value from a controlled vocabulary when the aggrMeth attribute has a value of "other". This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                     <xhtml:p>The attribute "measUnit" records the measurement unit, for example 'km', 'miles', etc. </xhtml:p>
+                     <xhtml:p>The "scale" attribute records unit of scale, for example 'x1', 'x1000', etc.</xhtml:p>
+                     <xhtml:p>The attribute "origin" records the point of origin for anchored scales.</xhtml:p>
+                     <xhtml:p>The "nature" attribute records the nature of the variable, whether it is 'nominal', 'ordinal', 'interval', 'ratio', or 'percent'. If the 'other' value is used, a value from a controlled vocabulary should be put into the otherNature attribute.</xhtml:p>
+                     <xhtml:p>The "otherNature" attribute should be used when the nature attribute has a value of "other". This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                     <xhtml:p>The attribute "additivity" records type of additivity, such as 'stock', 'flow', 'non-additive'. When the "other" value is used, a value from a controlled vocabulary should be put into the "otherAdditivity" attribute.</xhtml:p>
+                     <xhtml:p>The "otherAdditivity" attribute is used only when the "additivity" attribute has a value of "other". This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                     <xhtml:p>The attribute "temporal" indicates whether the variable relays time-related information. </xhtml:p>
+                     <xhtml:p>The "geog" attribute indicates whether the variable relays geographic information.</xhtml:p>
+                     <xhtml:p>The attribute "geoVocab" records the coding scheme used in the variable.</xhtml:p>
+                     <xhtml:p>The attribute "catQnty" records the number of categories found in the variable, and is used primarily for aggregate data files for verifying cell counts in nCubes.</xhtml:p>
+                     <xhtml:p>The "representationType" attribute was added to capture the specific DDI 3 representation type to facilitate translation between DDI 2 and DDI 3. If the "other" value is used, a term from a controlled vocabulary may be supplied in the otherRepresentationType attribute.</xhtml:p>
+                     <xhtml:p>The "otherRepresentationType" attribute should be used when the representationType attribute has a value of "other". This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
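+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>Minimal illustrative sketch: the variable name, label, and value range below are hypothetical; the child elements and attributes follow the varType content model defined above.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <!-- Hypothetical variable; "AGE" and the range 18-97 are illustrative values -->
+                        <var name="AGE" intrvl="contin" nature="ratio" dcml="0">
+                           <labl>Age of respondent</labl>
+                           <valrng>
+                              <range min="18" max="97"/>
+                           </valrng>
+                           <varFormat type="numeric"/>
+                        </var>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>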
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+   
+   <xs:complexType name="varFormatType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="type" default="numeric">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="character"/>
+                     <xs:enumeration value="numeric"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="formatname" type="xs:string"/>
+            <xs:attribute name="schema" default="ISO">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="SAS"/>
+                     <xs:enumeration value="SPSS"/>
+                     <xs:enumeration value="IBM"/>
+                     <xs:enumeration value="ANSI"/>
+                     <xs:enumeration value="ISO"/>
+                     <xs:enumeration value="XML-Data"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherSchema" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="category" default="other">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="date"/>
+                     <xs:enumeration value="time"/>
+                     <xs:enumeration value="currency"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherCategory" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="URI" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="varFormat" type="varFormatType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Variable Format</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The technical format of the variable in question. Attributes for this element include: "type," which indicates if the variable is character or numeric; "formatname," which in some cases may provide the name of the particular, proprietary format actually used; "schema," which identifies the vendor or standards body that defined the format (acceptable choices are SAS, SPSS, IBM, ANSI, ISO, XML-Data, or other); "category," which describes what kind of data the format represents, and includes date, time, currency, or "other" conceptual possibilities; and "URI," which supplies a network identifier for the format definition. If the "other" value is used for the schema attribute, a value from a controlled vocabulary must be used with the "otherSchema" attribute, and the complex element controlledVocabUsed should be used to identify the controlled vocabulary to which the selected term belongs. For the category attribute, a value from a controlled vocabulary may be provided if the "other" value is chosen. In this case, the term from the controlled vocabulary should be placed in the "otherCategory" attribute, and the controlledVocabUsed element should also be filled in.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <varFormat type="numeric" schema="SAS" formatname="DATE" category="date">The number in this variable is stored in the form 'ddmmmyy' in SAS format.</varFormat>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <varFormat type="numeric" formatname="date.iso8601" schema="XML-Data" category="date" URI="http://www.w3.org/TR/1998/NOTE-XML-data/">19541022</varFormat>
+                        </var>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="varGrpType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="labl" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="txt" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="concept" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="defntn" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="universe" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+            <xs:attribute name="type" default="other">
+               <xs:simpleType>
+                  <xs:restriction base="xs:NMTOKEN">
+                     <xs:enumeration value="section"/>
+                     <xs:enumeration value="multipleResp"/>
+                     <xs:enumeration value="grid"/>
+                     <xs:enumeration value="display"/>
+                     <xs:enumeration value="repetition"/>
+                     <xs:enumeration value="subject"/>
+                     <xs:enumeration value="version"/>
+                     <xs:enumeration value="iteration"/>
+                     <xs:enumeration value="analysis"/>
+                     <xs:enumeration value="pragmatic"/>
+                     <xs:enumeration value="record"/>
+                     <xs:enumeration value="file"/>
+                     <xs:enumeration value="randomized"/>
+                     <xs:enumeration value="other"/>
+                  </xs:restriction>
+               </xs:simpleType>
+            </xs:attribute>
+            <xs:attribute name="otherType" type="xs:NMTOKEN" use="optional"/>
+            <xs:attribute name="var" type="xs:IDREFS"/>
+            <xs:attribute name="varGrp" type="xs:IDREFS"/>
+            <xs:attribute name="name" type="xs:string"/>
+            <xs:attribute name="sdatrefs" type="xs:IDREFS"/>
+            <xs:attribute name="methrefs" type="xs:IDREFS"/>
+            <xs:attribute name="pubrefs" type="xs:IDREFS"/>
+            <xs:attribute name="access" type="xs:IDREFS"/>
+            <xs:attribute name="nCube" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+   
+   <xs:element name="varGrp" type="varGrpType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Variable Group</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">
+                     <xhtml:p>A group of variables that may share a common subject, arise from the interpretation of a single question, or are linked by some other factor.</xhtml:p>
+                     <xhtml:p>Variable groups are created this way in order to permit variables to belong to multiple groups, including multiple subject groups such as a group of variables on sex and income, or to a subject and a multiple response group, without causing overlapping groups. Variables that are linked by use of the same question need not be identified by a Variable Group element because they are linked by a common unique question identifier in the Variable element. Note that as a result of the strict sequencing required by XML, all Variable Groups must be marked up before the Variable element is opened. That is, the mark-up author cannot mark up a Variable Group, then mark up its constituent variables, then mark up another Variable Group.</xhtml:p>
+                     <xhtml:p>The "type" attribute refers to the general type of grouping of the variables, e.g., subject, multiple response. Use the value of "other" if the value is to come from an external controlled vocabulary, and place the term into the otherType attribute.</xhtml:p>
+                     <xhtml:p>The "otherType" attribute is used when the "type" attribute has a value of "other". This option should only be used when applying a controlled vocabulary to this attribute. Use the complex element controlledVocabUsed to identify the controlled vocabulary to which the selected term belongs.</xhtml:p>
+                     <xhtml:p>Specific variable groups, included within the "type" attribute, are:</xhtml:p>
+                     <xhtml:p>Section: Questions which derive from the same section of the questionnaire, e.g., all variables located in Section C.</xhtml:p>
+                     <xhtml:p>Multiple response: Questions where the respondent has the opportunity to select more than one answer from a variety of choices, e.g., what newspapers have you read in the past month (with the respondent able to select up to five choices).</xhtml:p>
+                     <xhtml:p>Grid: Sub-questions of an introductory or main question but which do not constitute a multiple response group, e.g., I am going to read you some events in the news lately and you tell me for each one whether you are very interested in the event, fairly interested in the fact, or not interested in the event.</xhtml:p>
+                     <xhtml:p>Display: Questions which appear on the same interview screen (CAI) together or are presented to the interviewer or respondent as a group.</xhtml:p>
+                     <xhtml:p>Repetition: The same variable (or group of variables) which are repeated for different groups of respondents or for the same respondent at a different time.</xhtml:p>
+                     <xhtml:p>Subject: Questions which address a common topic or subject, e.g., income, poverty, children.</xhtml:p>
+                     <xhtml:p>Version: Variables, often appearing in pairs, which represent different aspects of the same question, e.g., pairs of variables (or groups) which are adjusted/unadjusted for inflation or season or whatever, pairs of variables with/without missing data imputed, and versions of the same basic question.</xhtml:p>
+                     <xhtml:p>Iteration: Questions that appear in different sections of the data file measuring a common subject in different ways, e.g., a set of variables which report the progression of respondent income over the life course.</xhtml:p>
+                     <xhtml:p>Analysis: Variables combined into the same index, e.g., the components of a calculation, such as the numerator and the denominator of an economic statistic.</xhtml:p>
+                     <xhtml:p>Pragmatic: A variable group without shared properties.</xhtml:p>
+                     <xhtml:p>Record: Variable from a single record in a hierarchical file.</xhtml:p>
+                     <xhtml:p>File: Variable from a single file in a multifile study.</xhtml:p>
+                     <xhtml:p>Randomized: Variables generated by CAI surveys produced by one or more random number variables together with a response variable, e.g., random variable X which could equal 1 or 2 (at random) which in turn would control whether Q.23 is worded "men" or "women", e.g., would you favor helping [men/women] laid off from a factory obtain training for a new job?</xhtml:p>
+                     <xhtml:p>Other: Variables which do not fit easily into any of the categories listed above, e.g., a group of variables whose documentation is in another language.</xhtml:p>
+                     <xhtml:p>The "var" attribute is used to reference all the constituent variable IDs in the group.</xhtml:p>
+                     <xhtml:p>The "varGrp" attribute is used to reference all the subsidiary variable groups which nest underneath the current varGrp. This allows for encoding of a hierarchical structure of variable groups. </xhtml:p>
+                     <xhtml:p>The attribute "name" provides a name, or short label, for the group.</xhtml:p>
+                     <xhtml:p>The "sdatrefs" are summary data description references that record the ID values of all elements within the summary data description section of the Study Description that might apply to the group. These elements include: time period covered, date of collection, nation or country, geographic coverage, geographic unit, unit of analysis, universe, and kind of data.</xhtml:p>
+                     <xhtml:p>The "methrefs" are methodology and processing references which record the ID values of all elements within the study methodology and processing section of the Study Description which might apply to the group. These elements include information on data collection and data appraisal (e.g., sampling, sources, weighting, data cleaning, response rates, and sampling error estimates).</xhtml:p>
+                     <xhtml:p>The "pubrefs" attribute provides a link to publication/citation references and records the ID values of all citations elements within codeBook/stdyDscr/othrStdyMat or codeBook/otherMat that pertain to this variable group. </xhtml:p>
+                     <xhtml:p>The "access" attribute records the ID values of all elements in codeBook/stdyDscr/dataAccs of the document that describe access conditions for this variable group.</xhtml:p>
+                     <xhtml:p>The attribute "nCube" was included in 2.0 and subsequent versions in ERROR. DO NOT USE THIS ATTRIBUTE. It is retained only for purposes of backward-compatibility.</xhtml:p>
+                  </xhtml:div>
+               </xhtml:div>
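+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>Illustrative sketch: the group name, label, and the variable IDs referenced in the "var" attribute are hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <!-- Hypothetical subject group; V1, V2, and V3 stand in for real variable IDs -->
+                        <varGrp type="subject" name="income" var="V1 V2 V3">
+                           <labl>Household income variables</labl>
+                           <txt>Variables measuring total household income from all sources in the past twelve months.</txt>
+                        </varGrp>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>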
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="varQnty" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Overall Variable Count</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Number of variables.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <varQnty>27</varQnty>
+                     ]]></xhtml:samp> 
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="verRespType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextType">
+            <xs:attribute name="affiliation" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="verResp" type="verRespType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Version Responsibility Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The organization or person responsible for the version of the work.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <verResp>Zentralarchiv fuer Empirische Sozialforschung</verResp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <verResp>Inter-university Consortium for Political and Social Research</verResp>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <verStmt>
+                              <verResp>Zentralarchiv fuer Empirische Sozialforschung</verResp>
+                           </verStmt>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <verStmt>
+                              <verResp>Zentralarchiv fuer Empirische Sozialforschung</verResp>
+                           </verStmt>
+                        </nCube>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="verStmtType">
+      <xs:complexContent>
+         <xs:extension base="baseElementType">
+            <xs:sequence>
+               <xs:element ref="version" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="verResp" minOccurs="0" maxOccurs="unbounded"/>
+               <xs:element ref="notes" minOccurs="0" maxOccurs="unbounded"/>
+            </xs:sequence>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="verStmt" type="verStmtType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Version Statement</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Version statement for the work at the appropriate level: marked-up document; marked-up document source; study; study description, other material; other material for study. A version statement may also be included for a data file, a variable, or an nCube.</xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <verStmt>
+                           <version type="version" date="1999-01-25">Second version</version>
+                        </verStmt>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:complexType name="versionType" mixed="true">
+      <xs:complexContent>
+         <xs:extension base="simpleTextAndDateType">
+            <xs:attribute name="type" type="xs:string"/>
+         </xs:extension>
+      </xs:complexContent>
+   </xs:complexType>
+
+   <xs:element name="version" type="versionType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Version</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">Also known as release or edition. If there have been substantive changes in the data/documentation since their creation, this statement should be used at the appropriate level. The ISO standard for dates (YYYY-MM-DD) is recommended for use with the "date" attribute. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <version type="edition" date="1999-01-25">Second ICPSR Edition</version>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <var>
+                           <verStmt>
+                              <version type="version" date="1999-01-25">Second version of V25</version>
+                           </verStmt>
+                        </var>
+                     ]]></xhtml:samp>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <nCube>
+                           <verStmt>
+                              <version type="version" date="1999-01-25">Second version of N25</version>
+                           </verStmt>
+                        </nCube>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="weight" type="simpleTextType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">Weighting</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The use of sampling procedures may make it necessary to apply weights to produce accurate statistical results. Describe here the criteria for using weights in analysis of a collection. If a weighting formula or coefficient was developed, provide this formula, define its elements, and indicate how the formula is applied to data. </xhtml:div>
+               </xhtml:div>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <weight>The 1996 NES dataset includes two final person-level analysis weights which incorporate sampling, nonresponse, and post-stratification factors. One weight (variable #4) is for longitudinal micro-level analysis using the 1996 NES Panel. The other weight (variable #3) is for analysis of the 1996 NES combined sample (Panel component cases plus Cross-section supplement cases). In addition, a Time Series Weight (variable #5) which corrects for Panel attrition was constructed. This weight should be used in analyses which compare the 1996 NES to earlier unweighted National Election Study data collections.</weight>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+   <xs:element name="westBL" type="phraseType">
+      <xs:annotation>
+         <xs:documentation>
+            <xhtml:div>
+               <xhtml:h1 class="element_title">West Bounding Longitude</xhtml:h1>
+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Description</xhtml:h2>
+                  <xhtml:div class="description">The westernmost coordinate delimiting the geographic extent of the dataset. A valid range of values, expressed in decimal degrees (positive east and positive north), is: -180.0 &lt;= West Bounding Longitude Value &lt;= 180.0</xhtml:div>
+               </xhtml:div>
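+               <xhtml:div>
+                  <xhtml:h2 class="section_header">Example</xhtml:h2>
+                  <xhtml:div class="example">
+                     <xhtml:p>Illustrative sketch; the coordinate value is hypothetical.</xhtml:p>
+                     <xhtml:samp class="xml_sample"><![CDATA[
+                        <!-- Hypothetical westernmost coordinate, in decimal degrees -->
+                        <westBL>-97.738890</westBL>
+                     ]]></xhtml:samp>
+                  </xhtml:div>
+               </xhtml:div>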
+            </xhtml:div>
+         </xs:documentation>
+      </xs:annotation>
+   </xs:element>
+
+</xs:schema>
\ No newline at end of file
diff --git a/src/test/resources/xml/xsd/ddi-codebook-2.5/xml.xsd b/src/test/resources/xml/xsd/ddi-codebook-2.5/xml.xsd
new file mode 100644
index 00000000000..48ec4d89c75
--- /dev/null
+++ b/src/test/resources/xml/xsd/ddi-codebook-2.5/xml.xsd
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<xs:schema targetNamespace="http://www.w3.org/XML/1998/namespace" xmlns:xs="http://www.w3.org/2001/XMLSchema" xml:lang="en">
+
+ <xs:annotation>
+  <xs:documentation>
+   See http://www.w3.org/XML/1998/namespace.html and
+   http://www.w3.org/TR/REC-xml for information about this namespace.
+  </xs:documentation>
+ </xs:annotation>
+
+ <xs:annotation>
+  <xs:documentation>This schema defines attributes and an attribute group
+	suitable for use by
+	schemas wishing to allow xml:base, xml:lang or xml:space attributes
+	on elements they define.
+
+	To enable this, such a schema must import this schema
+	for the XML namespace, e.g. as follows:
+	&lt;schema . . .&gt;
+	 . . .
+	 &lt;import namespace="http://www.w3.org/XML/1998/namespace"
+		    schemaLocation="http://www.w3.org/2001/03/xml.xsd"/&gt;
+
+	Subsequently, qualified reference to any of the attributes
+	or the group defined below will have the desired effect, e.g.
+
+	&lt;type . . .&gt;
+	 . . .
+	 &lt;attributeGroup ref="xml:specialAttrs"/&gt;
+
+	 will define a type which will schema-validate an instance
+	 element with any of those attributes</xs:documentation>
+ </xs:annotation>
+
+ <xs:annotation>
+  <xs:documentation>In keeping with the XML Schema WG's standard versioning
+   policy, this schema document will persist at
+   http://www.w3.org/2001/03/xml.xsd.
+   At the date of issue it can also be found at
+   http://www.w3.org/2001/xml.xsd.
+   The schema document at that URI may however change in the future,
+   in order to remain compatible with the latest version of XML Schema
+   itself.  In other words, if the XML Schema namespace changes, the version
+   of this document at
+   http://www.w3.org/2001/xml.xsd will change
+   accordingly; the version at
+   http://www.w3.org/2001/03/xml.xsd will not change.
+  </xs:documentation>
+ </xs:annotation>
+
+ <xs:attribute name="lang" type="xs:language">
+  <xs:annotation>
+   <xs:documentation>In due course, we should install the relevant ISO 2- and 3-letter
+	 codes as the enumerated possible values . . .</xs:documentation>
+  </xs:annotation>
+ </xs:attribute>
+
+ <xs:attribute name="space" default="preserve">
+  <xs:simpleType>
+   <xs:restriction base="xs:NCName">
+    <xs:enumeration value="default"/>
+    <xs:enumeration value="preserve"/>
+   </xs:restriction>
+  </xs:simpleType>
+ </xs:attribute>
+
+ <xs:attribute name="base" type="xs:anyURI">
+  <xs:annotation>
+   <xs:documentation>See http://www.w3.org/TR/xmlbase/ for
+		     information about this attribute.</xs:documentation>
+  </xs:annotation>
+ </xs:attribute>
+
+ <xs:attributeGroup name="specialAttrs">
+  <xs:attribute ref="xml:base"/>
+  <xs:attribute ref="xml:lang"/>
+  <xs:attribute ref="xml:space"/>
+ </xs:attributeGroup>
+
+</xs:schema>
\ No newline at end of file
diff --git a/tests/data/datasetContacts1.json b/tests/data/datasetContacts1.json
index 3953fc6daef..8c4a7cb2b11 100644
--- a/tests/data/datasetContacts1.json
+++ b/tests/data/datasetContacts1.json
@@ -60,7 +60,7 @@
                   "typeName": "datasetContactName",
                   "multiple": false,
                   "typeClass": "primitive",
-                  "value": "LastContact2, FirstContact2"
+                  "value": "Simpson, Homer"
                 },
                 "datasetContactAffiliation": {
                   "typeName": "datasetContactAffiliation",
diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt
index 1e9110be2de..3c4f7dce31f 100644
--- a/tests/integration-tests.txt
+++ b/tests/integration-tests.txt
@@ -1 +1 @@
-DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT
+DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,ProvIT,S3AccessIT
diff --git a/tests/run_docker_dataverse.sh b/tests/run_docker_dataverse.sh
deleted file mode 100644
index 56d55f8b057..00000000000
--- a/tests/run_docker_dataverse.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-cd ..
-. ./conf/docker-aio/prep_it.bash
diff --git a/tests/shell/spec/update_fields_spec.sh b/tests/shell/spec/update_fields_spec.sh
index e77121672dd..48054a121b7 100644
--- a/tests/shell/spec/update_fields_spec.sh
+++ b/tests/shell/spec/update_fields_spec.sh
@@ -1,16 +1,16 @@
 #shellcheck shell=sh
 
 update_fields() {
-  ../../conf/solr/8.11.1/update-fields.sh "$@"
+  ../../conf/solr/9.3.0/update-fields.sh "$@"
 }
 
 Describe "Update fields command"
 
   Describe "can operate on upstream data"
-    copyUpstreamSchema() { cp ../../conf/solr/8.11.1/schema.xml data/solr/upstream-schema.xml; }
+    copyUpstreamSchema() { cp ../../conf/solr/9.3.0/schema.xml data/solr/upstream-schema.xml; }
     AfterAll 'copyUpstreamSchema'
 
-    Path schema-xml="../../conf/solr/8.11.1/schema.xml"
+    Path schema-xml="../../conf/solr/9.3.0/schema.xml"
     It "needs upstream schema.xml"
       The path schema-xml should be exist
     End