diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 7e6995d76d9..3dba7d52109 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -14,7 +14,7 @@ Thank you for contributing to the Dataverse Project through the creation of a bu WARNING: If this is a security issue it should be reported privately to security@dataverse.org More information on bug issues and contributions can be found in the "Contributing to Dataverse" page: -https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#bug-reportsissues +https://guides.dataverse.org/en/latest/contributor/index.html Please fill out as much of the template as you can. Start below this comment section. @@ -44,7 +44,6 @@ Start below this comment section. **Any related open or closed issues to this bug report?** - **Screenshots:** No matter the issue, screenshots are always welcome. @@ -53,3 +52,7 @@ To add a screenshot, please use one of the following formats and/or methods desc * https://help.github.com/en/articles/file-attachments-on-issues-and-pull-requests * + + +**Are you thinking about creating a pull request for this issue?** +Help is always welcome, is this bug something you or your organization plan to fix? diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index d6248537418..7365cb4317c 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,7 +1,7 @@ --- name: Feature request about: Suggest an idea or new feature for the Dataverse software! -title: 'Feature Request/Idea:' +title: 'Feature Request:' labels: 'Type: Feature' assignees: '' @@ -11,7 +11,7 @@ assignees: '' Thank you for contributing to the Dataverse Project through the creation of a feature request! 
More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page: -https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#ideasfeature-requests +https://guides.dataverse.org/en/latest/contributor/index.html Please fill out as much of the template as you can. Start below this comment section. @@ -34,3 +34,6 @@ Start below this comment section. **Any open or closed issues related to this feature request?** + +**Are you thinking about creating a pull request for this feature?** +Help is always welcome, is this feature something you or your organization plan to implement? diff --git a/.github/ISSUE_TEMPLATE/idea_proposal.md b/.github/ISSUE_TEMPLATE/idea_proposal.md new file mode 100644 index 00000000000..8cb6c7bfafe --- /dev/null +++ b/.github/ISSUE_TEMPLATE/idea_proposal.md @@ -0,0 +1,40 @@ +--- +name: Idea proposal +about: Propose a new idea for discussion to improve the Dataverse software! +title: 'Suggestion:' +labels: 'Type: Suggestion' +assignees: '' + +--- + + + +**Overview of the Suggestion** + + +**What kind of user is the suggestion intended for?** +(Example user roles: API User, Curator, Depositor, Guest, Superuser, Sysadmin) + + +**What inspired this idea?** + + +**What existing behavior do you want changed?** + + +**Any brand new behavior you want to add to Dataverse?** + + +**Any open or closed issues related to this suggestion?** + + +**Are you thinking about creating a pull request for this issue?** +Help is always welcome, is this idea something you or your organization plan to implement? 
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index b57aa23fc0f..f2a779bbf21 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -2,7 +2,7 @@ **Which issue(s) this PR closes**: -Closes # +- Closes # **Special notes for your reviewer**: diff --git a/.github/actions/setup-maven/action.yml b/.github/actions/setup-maven/action.yml new file mode 100644 index 00000000000..4cf09f34231 --- /dev/null +++ b/.github/actions/setup-maven/action.yml @@ -0,0 +1,37 @@ +--- +name: "Setup Maven and Caches" +description: "Determine Java version and setup Maven, including necessary caches." +inputs: + git-reference: + description: 'The git reference (branch/tag) to check out' + required: false + default: '${{ github.ref }}' + pom-paths: + description: "List of paths to Maven POM(s) for cache dependency setup" + required: false + default: 'pom.xml' +runs: + using: composite + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: ${{ inputs.git-reference }} + - name: Determine Java version by reading the Maven property + shell: bash + run: | + echo "JAVA_VERSION=$(grep '' ${GITHUB_WORKSPACE}/modules/dataverse-parent/pom.xml | cut -f2 -d'>' | cut -f1 -d'<')" | tee -a ${GITHUB_ENV} + - name: Set up JDK ${{ env.JAVA_VERSION }} + id: setup-java + uses: actions/setup-java@v4 + with: + java-version: ${{ env.JAVA_VERSION }} + distribution: 'temurin' + cache: 'maven' + cache-dependency-path: ${{ inputs.pom-paths }} + - name: Download common cache on branch cache miss + if: ${{ steps.setup-java.outputs.cache-hit != 'true' }} + uses: actions/cache/restore@v4 + with: + key: dataverse-maven-cache + path: ~/.m2/repository diff --git a/.github/workflows/check_property_files.yml b/.github/workflows/check_property_files.yml new file mode 100644 index 00000000000..505310aab35 --- /dev/null +++ b/.github/workflows/check_property_files.yml @@ -0,0 +1,32 @@ +name: "Properties Check" +on: + pull_request: + 
paths: + - "src/**/*.properties" + - "scripts/api/data/metadatablocks/*" +jobs: + duplicate_keys: + name: Duplicate Keys + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run duplicates detection script + shell: bash + run: tests/check_duplicate_properties.sh + + metadata_blocks_properties: + name: Metadata Blocks Properties + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Setup GraalVM + Native Image + uses: graalvm/setup-graalvm@v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + java-version: '21' + distribution: 'graalvm-community' + - name: Setup JBang + uses: jbangdev/setup-jbang@main + - name: Run metadata block properties verification script + shell: bash + run: tests/verify_mdb_properties.sh diff --git a/.github/workflows/container_app_push.yml b/.github/workflows/container_app_push.yml index b3e247e376c..3b7ce066d73 100644 --- a/.github/workflows/container_app_push.yml +++ b/.github/workflows/container_app_push.yml @@ -5,6 +5,12 @@ on: # We are deliberately *not* running on push events here to avoid double runs. # Instead, push events will trigger from the base image and maven unit tests via workflow_call. 
workflow_call: + inputs: + base-image-ref: + type: string + description: "Reference of the base image to build on in full qualified form [/]/:" + required: false + default: "gdcc/base:unstable" pull_request: branches: - develop @@ -16,7 +22,6 @@ on: env: IMAGE_TAG: unstable - BASE_IMAGE_TAG: unstable REGISTRY: "" # Empty means default to Docker Hub PLATFORMS: "linux/amd64,linux/arm64" MASTER_BRANCH_TAG: alpha @@ -33,20 +38,24 @@ jobs: if: ${{ github.repository_owner == 'IQSS' }} steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Set up JDK - uses: actions/setup-java@v3 + - name: Checkout and Setup Maven + uses: IQSS/dataverse/.github/actions/setup-maven@develop with: - java-version: "17" - distribution: temurin - cache: maven + pom-paths: | + pom.xml + modules/container-configbaker/pom.xml + modules/dataverse-parent/pom.xml + + # TODO: Add a filter step here, that avoids building the image if this is a PR and there are other files touched than declared above. + # Use https://github.com/dorny/paths-filter to solve this. This will ensure we do not run this twice if this workflow + # will be triggered by the other workflows already (base image or java changes) + # To become a part of #10618. 
- name: Build app and configbaker container image with local architecture and submodules (profile will skip tests) run: > mvn -B -f modules/dataverse-parent -P ct -pl edu.harvard.iq:dataverse -am + $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" ) install # TODO: add smoke / integration testing here (add "-Pct -DskipIntegrationTests=false") @@ -106,11 +115,13 @@ jobs: if: needs.check-secrets.outputs.available == 'true' && ( github.event_name != 'push' || ( github.event_name == 'push' && contains(fromJSON('["develop", "master"]'), github.ref_name))) steps: - - uses: actions/checkout@v3 - - uses: actions/setup-java@v3 + - name: Checkout and Setup Maven + uses: IQSS/dataverse/.github/actions/setup-maven@develop with: - java-version: "17" - distribution: temurin + pom-paths: | + pom.xml + modules/container-configbaker/pom.xml + modules/dataverse-parent/pom.xml # Depending on context, we push to different targets. Login accordingly. - if: github.event_name != 'pull_request' @@ -146,11 +157,13 @@ jobs: run: > mvn -B -f modules/dataverse-parent -P ct -pl edu.harvard.iq:dataverse -am + $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" ) install - name: Deploy multi-arch application and configbaker container image run: > mvn - -Dapp.image.tag=${{ env.IMAGE_TAG }} -Dbase.image.tag=${{ env.BASE_IMAGE_TAG }} + -Dapp.image.tag=${{ env.IMAGE_TAG }} + $( [[ -n "${{ inputs.base-image-ref }}" ]] && echo "-Dbase.image=${{ inputs.base-image-ref }}" ) ${{ env.REGISTRY }} -Ddocker.platforms=${{ env.PLATFORMS }} -P ct deploy diff --git a/.github/workflows/container_base_push.yml b/.github/workflows/container_base_push.yml index b938851f816..c2340576c78 100644 --- a/.github/workflows/container_base_push.yml +++ b/.github/workflows/container_base_push.yml @@ -1,99 +1,130 @@ --- -name: Base Container Image +name: Container Images Releasing on: push: + tags: + - 'v[6-9].**' branches: - 'develop' - - 
'master' + # "Path filters are not evaluated for pushes of tags" https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#onpushpull_requestpull_request_targetpathspaths-ignore paths: - 'modules/container-base/**' + - '!modules/container-base/src/backports/**' + - '!modules/container-base/README.md' - 'modules/dataverse-parent/pom.xml' - '.github/workflows/container_base_push.yml' - pull_request: - branches: - - 'develop' - - 'master' - paths: - - 'modules/container-base/**' - - 'modules/dataverse-parent/pom.xml' - - '.github/workflows/container_base_push.yml' - schedule: - - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC + + # These TODOs are left for #10618 + # TODO: we are missing a workflow_call option here, so we can trigger this flow from pr comments and maven tests (keep the secrets availability in mind!) + # TODO: we are missing a pull_request option here (filter for stuff that would trigger the maven runs!) so we can trigger preview builds for them when coming from the main repo (keep the secrets availability in mind!) 
env: - IMAGE_TAG: unstable PLATFORMS: linux/amd64,linux/arm64 + DEVELOPMENT_BRANCH: develop jobs: build: - name: Build image + name: Base Image runs-on: ubuntu-latest permissions: contents: read packages: read - strategy: - matrix: - jdk: [ '17' ] # Only run in upstream repo - avoid unnecessary runs in forks if: ${{ github.repository_owner == 'IQSS' }} + outputs: + base-image-ref: ${{ steps.finalize.outputs.base-image-ref }} steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Set up JDK ${{ matrix.jdk }} - uses: actions/setup-java@v3 + - name: Checkout and Setup Maven + uses: IQSS/dataverse/.github/actions/setup-maven@develop with: - java-version: ${{ matrix.jdk }} - distribution: 'adopt' - - name: Cache Maven packages - uses: actions/cache@v3 - with: - path: ~/.m2 - key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} - restore-keys: ${{ runner.os }}-m2 - - - name: Build base container image with local architecture - run: mvn -f modules/container-base -Pct package + pom-paths: modules/container-base/pom.xml - # Run anything below only if this is not a pull request. - # Accessing, pushing tags etc. to DockerHub will only succeed in upstream because secrets. - - - if: ${{ github.event_name == 'push' && github.ref_name == 'develop' }} - name: Push description to DockerHub - uses: peter-evans/dockerhub-description@v3 + # Note: Accessing, pushing tags etc. to DockerHub will only succeed in upstream and + # on events in context of upstream because secrets. PRs run in context of forks by default! 
+ - name: Log in to the Container registry + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - repository: gdcc/base - short-description: "Dataverse Base Container image providing Payara application server and optimized configuration" - readme-filepath: ./modules/container-base/README.md - - if: ${{ github.event_name != 'pull_request' }} - name: Log in to the Container registry - uses: docker/login-action@v2 + # In case this is a push to develop, we care about buildtime. + # Configure a remote ARM64 build host in addition to the local AMD64 in two steps. + - name: Setup SSH agent + if: ${{ github.event_name != 'schedule' }} + uses: webfactory/ssh-agent@v0.9.0 with: - registry: ${{ env.REGISTRY }} - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - if: ${{ github.event_name != 'pull_request' }} - name: Set up QEMU for multi-arch builds - uses: docker/setup-qemu-action@v2 - - name: Re-set image tag based on branch - if: ${{ github.ref_name == 'master' }} - run: echo "IMAGE_TAG=alpha" >> $GITHUB_ENV - - if: ${{ github.event_name != 'pull_request' }} - name: Deploy multi-arch base container image to Docker Hub - run: mvn -f modules/container-base -Pct deploy -Dbase.image.tag=${{ env.IMAGE_TAG }} -Ddocker.platforms=${{ env.PLATFORMS }} + ssh-private-key: ${{ secrets.BUILDER_ARM64_SSH_PRIVATE_KEY }} + - name: Provide the known hosts key and the builder config + if: ${{ github.event_name != 'schedule' }} + run: | + echo "${{ secrets.BUILDER_ARM64_SSH_HOST_KEY }}" > ~/.ssh/known_hosts + mkdir -p modules/container-base/target/buildx-state/buildx/instances + cat > modules/container-base/target/buildx-state/buildx/instances/maven << EOF + { "Name": "maven", + "Driver": "docker-container", + "Dynamic": false, + "Nodes": [{"Name": "maven0", + "Endpoint": "unix:///var/run/docker.sock", + "Platforms": [{"os": "linux", "architecture": "amd64"}], + "DriverOpts": null, + 
"Flags": ["--allow-insecure-entitlement=network.host"], + "Files": null}, + {"Name": "maven1", + "Endpoint": "ssh://${{ secrets.BUILDER_ARM64_SSH_CONNECTION }}", + "Platforms": [{"os": "linux", "architecture": "arm64"}], + "DriverOpts": null, + "Flags": ["--allow-insecure-entitlement=network.host"], + "Files": null}]} + EOF + + # Determine the base image name we are going to use from here on + - name: Determine base image name + run: | + if [[ "${{ github.ref_name }}" = "${{ env.DEVELOPMENT_BRANCH }}" ]]; then + echo "BASE_IMAGE=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -q -DforceStdout )" | tee -a "${GITHUB_ENV}" + echo "BASE_IMAGE_UPCOMING=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )" | tee -a "${GITHUB_ENV}" + else + echo "BASE_IMAGE=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout )" | tee -a "${GITHUB_ENV}" + fi + - name: Calculate revision number for immutable tag (on release branches only) + if: ${{ github.ref_name != env.DEVELOPMENT_BRANCH }} + id: revision-tag + uses: ./.github/actions/get-image-revision + with: + image-ref: ${{ env.BASE_IMAGE }} + tag-options-prefix: "-Dbase.image.tag.suffix='' -Ddocker.tags.revision=" + - name: Configure update of "latest" tag for development branch + id: develop-tag + if: ${{ github.ref_name == env.DEVELOPMENT_BRANCH }} + run: | + echo "tag-options=-Ddocker.tags.develop=unstable -Ddocker.tags.upcoming=${BASE_IMAGE_UPCOMING#*:}" | tee -a "${GITHUB_OUTPUT}" + + - name: Deploy multi-arch base container image to Docker Hub + id: build + run: | + mvn -f modules/container-base -Pct deploy -Ddocker.noCache -Ddocker.platforms=${{ env.PLATFORMS }} \ + -Ddocker.imagePropertyConfiguration=override ${{ steps.develop-tag.outputs.tag-options }} ${{ steps.revision-tag.outputs.tag-options }} + + - name: Determine appropriate 
base image ref for app image + id: finalize + run: | + if [[ "${{ github.ref_name }}" = "${{ env.DEVELOPMENT_BRANCH }}" ]]; then + echo "base-image-ref=${BASE_IMAGE_UPCOMING}" | tee -a "$GITHUB_OUTPUT" + else + echo "base-image-ref=gdcc/base:${{ steps.revision-tag.outputs.revision-tag }}" | tee -a "$GITHUB_OUTPUT" + fi + push-app-img: name: "Rebase & Publish App Image" permissions: contents: read packages: write pull-requests: write - needs: build - # We do not release a new base image for pull requests, so do not trigger. - if: ${{ github.event_name != 'pull_request' }} - uses: ./.github/workflows/container_app_push.yml secrets: inherit + needs: + - build + uses: ./.github/workflows/container_app_push.yml + with: + base-image-ref: ${{ needs.build.outputs.base-image-ref }} diff --git a/.github/workflows/container_maintenance.yml b/.github/workflows/container_maintenance.yml new file mode 100644 index 00000000000..986fe25cdf5 --- /dev/null +++ b/.github/workflows/container_maintenance.yml @@ -0,0 +1,119 @@ +--- +name: Container Images Scheduled Maintenance + +on: + # TODO: think about adding a (filtered) push event trigger here in case we change the patches + # --- + # Allow manual workflow triggers in case we need to repair images on Docker Hub (build and replace) + workflow_dispatch: + inputs: + force_build: + type: boolean + required: false + default: false + description: "Build and deploy even if no newer Java images or package updates are found." 
+ schedule: + - cron: '23 3 * * 0' # Run for 'develop' every Sunday at 03:23 UTC + +env: + PLATFORMS: linux/amd64,linux/arm64 + NUM_PAST_RELEASES: 3 + +jobs: + build: + name: Base Image Matrix Build + runs-on: ubuntu-latest + permissions: + contents: read + packages: read + # Only run in upstream repo - avoid unnecessary runs in forks + if: ${{ github.repository_owner == 'IQSS' }} + outputs: + supported_tag_matrix: ${{ steps.execute.outputs.supported_tag_matrix }} + rebuilt_base_images: ${{ steps.execute.outputs.rebuilt_base_images }} + + steps: + - name: Checkout and Setup Maven + uses: IQSS/dataverse/.github/actions/setup-maven@develop + with: + pom-paths: modules/container-base/pom.xml + + # Note: Accessing, pushing tags etc. to DockerHub will only succeed in upstream and + # on events in context of upstream because secrets. PRs run in context of forks by default! + - name: Log in to the Container registry + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Set up QEMU for multi-arch builds + uses: docker/setup-qemu-action@v3 + with: + platforms: ${{ env.PLATFORMS }} + + # Discover the releases we want to maintain + - name: Discover maintained releases + id: discover + run: | + echo "FORCE_BUILD=$( [[ "${{ inputs.force_build }}" = "true" ]] && echo 1 || echo 0 )" | tee -a "$GITHUB_ENV" + DEVELOPMENT_BRANCH=$( curl -f -sS https://api.github.com/repos/${{ github.repository }} | jq -r '.default_branch' ) + echo "DEVELOPMENT_BRANCH=$DEVELOPMENT_BRANCH" | tee -a "$GITHUB_ENV" + echo "branches=$( curl -f -sS https://api.github.com/repos/IQSS/dataverse/releases | jq -r " .[0:${{ env.NUM_PAST_RELEASES }}] | .[].tag_name, \"${DEVELOPMENT_BRANCH}\" " | tr "\n" " " )" | tee -a "${GITHUB_OUTPUT}" + + # Execute matrix build for the discovered branches + - name: Execute build matrix script + id: execute + run: | + .github/workflows/scripts/maintenance-job.sh ${{ 
steps.discover.outputs.branches }} + + # TODO: Use the needs.build.outputs.rebuilt_base_images with fromJSON() to create a matrix job. + # Must be a single rank matrix (vector), the branch and base image tag information ships as "branch=tag" string + # Will be part of working on #10618, app image versioned tags. + #push-app-img: + # name: "Rebase & Publish App Image" + # permissions: + # contents: read + # packages: write + # pull-requests: write + # secrets: inherit + # needs: + # - build + # strategy: + # fail-fast: false + # matrix: + # branch: ${{ fromJson(needs.discover.outputs.branches) }} + # uses: ./.github/workflows/container_app_push.yml + # with: + # branch: ${{ matrix.branch }} + + hub-description: + name: Push description to DockerHub + runs-on: ubuntu-latest + permissions: + contents: read + packages: read + needs: build + steps: + - name: Checkout repository + uses: actions/checkout@v4 + - name: Render README + id: render + run: | + TAGS_JSON='${{ needs.build.outputs.supported_tag_matrix }}' + echo "$TAGS_JSON" | jq -r 'keys | sort | reverse | .[]' | + while IFS= read -r branch; do + echo \ + "- \`$( echo "$TAGS_JSON" | jq --arg v "$branch" -r '.[$v] | join("`, `")' )\`" \ + "([Dockerfile](https://github.com/IQSS/dataverse/blob/${branch}/modules/container-base/src/main/docker/Dockerfile)," \ + "[Patches](https://github.com/IQSS/dataverse/blob/develop/modules/container-base/src/backports/${branch}))" \ + | tee -a "${GITHUB_WORKSPACE}/tags.md" + done + sed -i -e "/<\!-- TAG BLOCK HERE -->/r ${GITHUB_WORKSPACE}/tags.md" "./modules/container-base/README.md" + + - name: Push description to DockerHub + uses: peter-evans/dockerhub-description@v4 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + repository: gdcc/base + short-description: "Dataverse Base Container image providing Payara application server and optimized configuration" + readme-filepath: ./modules/container-base/README.md \ No newline at 
end of file diff --git a/.github/workflows/maven_unit_test.yml b/.github/workflows/maven_unit_test.yml index 4ad4798bc64..a94b17a67ba 100644 --- a/.github/workflows/maven_unit_test.yml +++ b/.github/workflows/maven_unit_test.yml @@ -30,6 +30,7 @@ jobs: continue-on-error: ${{ matrix.experimental }} runs-on: ubuntu-latest steps: + # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} @@ -95,6 +96,7 @@ jobs: # status: "Experimental" continue-on-error: ${{ matrix.experimental }} steps: + # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - uses: actions/checkout@v3 - name: Set up JDK ${{ matrix.jdk }} @@ -128,6 +130,7 @@ jobs: needs: integration-test name: Coverage Report Submission steps: + # TODO: As part of #10618 change to setup-maven custom action # Basic setup chores - uses: actions/checkout@v3 - uses: actions/setup-java@v3 @@ -156,6 +159,11 @@ jobs: # NOTE: this may be extended with adding a report to the build output, leave a comment, send to Sonarcloud, ... + # TODO: Add a filter step here, that avoids calling the app image release workflow if there are changes to the base image. + # Use https://github.com/dorny/paths-filter to solve this. Will require an additional job or adding to integration-test job. + # This way we ensure that we're not running the app image flow with a non-matching base image. + # To become a part of #10618. + push-app-img: name: Publish App Image permissions: diff --git a/.github/workflows/scripts/maintenance-job.sh b/.github/workflows/scripts/maintenance-job.sh new file mode 100755 index 00000000000..370988b9812 --- /dev/null +++ b/.github/workflows/scripts/maintenance-job.sh @@ -0,0 +1,180 @@ +#!/bin/bash + +# A matrix-like job to maintain a number of releases as well as the latest snapshot of Dataverse. 
+ +# PREREQUISITES: +# - You have Java, Maven, QEMU and Docker all setup and ready to go +# - You obviously checked out the develop branch, otherwise you'd not be executing this script +# - You added all the branch names you want to run maintenance for as arguments +# Optional, but recommended: +# - You added a DEVELOPMENT_BRANCH env var to your runner/job env with the name of the development branch +# - You added a FORCE_BUILD=0|1 env var to indicate if the base image build should be forced +# - You added a PLATFORMS env var with all the target platforms you want to build for + +# NOTE: +# This script is a culmination of Github Action steps into a single script. +# The reason to put all of this in here is due to the complexity of the Github Action and the limitation of the +# matrix support in Github actions, where outputs cannot be aggregated or otherwise used further. + +set -euo pipefail + +# Get all the inputs +# If not within a runner, just print to stdout (duplicating the output in case of tee usage, but that's ok for testing) +GITHUB_OUTPUT=${GITHUB_OUTPUT:-"/proc/self/fd/1"} +GITHUB_ENV=${GITHUB_ENV:-"/proc/self/fd/1"} +GITHUB_WORKSPACE=${GITHUB_WORKSPACE:-"$(pwd)"} +GITHUB_SERVER_URL=${GITHUB_SERVER_URL:-"https://github.com"} +GITHUB_REPOSITORY=${GITHUB_REPOSITORY:-"IQSS/dataverse"} + +MAINTENANCE_WORKSPACE="${GITHUB_WORKSPACE}/maintenance-job" + +DEVELOPMENT_BRANCH="${DEVELOPMENT_BRANCH:-"develop"}" +FORCE_BUILD="${FORCE_BUILD:-"0"}" +PLATFORMS="${PLATFORMS:-"linux/amd64,linux/arm64"}" + +# Setup and validation +if [[ -z "$*" ]]; then + >&2 echo "You must give a list of branch names as arguments" + exit 1; +fi + +source "$( dirname "$0" )/utils.sh" + +# Delete old stuff if present +rm -rf "$MAINTENANCE_WORKSPACE" +mkdir -p "$MAINTENANCE_WORKSPACE" + +# Store the image tags we maintain in this array (same order as branches array!) 
+# This list will be used to build the support matrix within the Docker Hub image description +SUPPORTED_ROLLING_TAGS=() +# Store the tags of base images we are actually rebuilding to base new app images upon +# Takes the form "branch-name=base-image-ref" +REBUILT_BASE_IMAGES=() + +for BRANCH in "$@"; do + echo "::group::Running maintenance for $BRANCH" + + # 0. Determine if this is a development branch and the most current release + IS_DEV=0 + if [[ "$BRANCH" = "$DEVELOPMENT_BRANCH" ]]; then + IS_DEV=1 + fi + IS_CURRENT_RELEASE=0 + if [[ "$BRANCH" = $( curl -f -sS "https://api.github.com/repos/$GITHUB_REPOSITORY/releases" | jq -r '.[0].tag_name' ) ]]; then + IS_CURRENT_RELEASE=1 + fi + + # 1. Let's get the maintained sources + git clone -c advice.detachedHead=false --depth 1 --branch "$BRANCH" "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}" "$MAINTENANCE_WORKSPACE/$BRANCH" + # Switch context + cd "$MAINTENANCE_WORKSPACE/$BRANCH" + + # 2. Now let's apply the patches (we have them checked out in $GITHUB_WORKSPACE, not necessarily in this local checkout) + echo "Checking for patches..." + if [[ -d ${GITHUB_WORKSPACE}/modules/container-base/src/backports/$BRANCH ]]; then + echo "Applying patches now." + find "${GITHUB_WORKSPACE}/modules/container-base/src/backports/$BRANCH" -type f -name '*.patch' -print0 | xargs -0 -n1 patch -p1 -s -i + fi + + # 3. Determine the base image ref (/:) + BASE_IMAGE_REF="" + # For the dev branch we want the full flexi stack tag, to detect stack upgrades requiring new build + if (( IS_DEV )); then + BASE_IMAGE_REF=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -q -DforceStdout ) + else + BASE_IMAGE_REF=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image -Dbase.image.tag.suffix="" -q -DforceStdout ) + fi + echo "Determined BASE_IMAGE_REF=$BASE_IMAGE_REF from Maven" + + # 4. 
Check for Temurin image updates + JAVA_IMAGE_REF=$( mvn help:evaluate -Pct -f modules/container-base -Dexpression=java.image -q -DforceStdout ) + echo "Determined JAVA_IMAGE_REF=$JAVA_IMAGE_REF from Maven" + NEWER_JAVA_IMAGE=0 + if check_newer_parent "$JAVA_IMAGE_REF" "$BASE_IMAGE_REF"; then + NEWER_JAVA_IMAGE=1 + fi + + # 5. Check for package updates in base image + PKGS="$( grep "ARG PKGS" modules/container-base/src/main/docker/Dockerfile | cut -f2 -d= | tr -d '"' )" + echo "Determined installed packages=\"$PKGS\" from Maven" + NEWER_PKGS=0 + # Don't bother with package checks if the java image is newer already + if ! (( NEWER_JAVA_IMAGE )); then + if check_newer_pkgs "$BASE_IMAGE_REF" "$PKGS"; then + NEWER_PKGS=1 + fi + fi + + # 6. Get current immutable revision tag if not on the dev branch + REV=$( current_revision "$BASE_IMAGE_REF" ) + CURRENT_REV_TAG="${BASE_IMAGE_REF#*:}-r$REV" + NEXT_REV_TAG="${BASE_IMAGE_REF#*:}-r$(( REV + 1 ))" + + # 7. Let's put together what tags we want added to this build run + TAG_OPTIONS="" + if ! (( IS_DEV )); then + TAG_OPTIONS="-Dbase.image=$BASE_IMAGE_REF -Ddocker.tags.revision=$NEXT_REV_TAG" + # In case of the current release, add the "latest" tag as well. + if (( IS_CURRENT_RELEASE )); then + TAG_OPTIONS="$TAG_OPTIONS -Ddocker.tags.latest=latest" + fi + else + UPCOMING_TAG=$( mvn initialize help:evaluate -Pct -f modules/container-base -Dexpression=base.image.tag -Dbase.image.tag.suffix="" -q -DforceStdout ) + TAG_OPTIONS="-Ddocker.tags.develop=unstable -Ddocker.tags.upcoming=$UPCOMING_TAG" + + # For the dev branch we only have rolling tags and can add them now already + SUPPORTED_ROLLING_TAGS+=("[\"unstable\", \"$UPCOMING_TAG\", \"${BASE_IMAGE_REF#*:}\"]") + fi + echo "Determined these additional Maven tag options: $TAG_OPTIONS" + + # 8. 
Let's build the base image if necessary + NEWER_IMAGE=0 + if (( NEWER_JAVA_IMAGE + NEWER_PKGS + FORCE_BUILD > 0 )); then + mvn -Pct -f modules/container-base deploy -Ddocker.noCache -Ddocker.platforms="${PLATFORMS}" \ + -Ddocker.imagePropertyConfiguration=override $TAG_OPTIONS + NEWER_IMAGE=1 + # Save the information about the immutable or rolling tag we just built + if ! (( IS_DEV )); then + REBUILT_BASE_IMAGES+=("$BRANCH=${BASE_IMAGE_REF%:*}:$NEXT_REV_TAG") + else + REBUILT_BASE_IMAGES+=("$BRANCH=$BASE_IMAGE_REF") + fi + else + echo "No rebuild necessary, we're done here." + fi + + # 9. Add list of rolling and immutable tags for release builds + if ! (( IS_DEV )); then + RELEASE_TAGS_LIST="[" + if (( IS_CURRENT_RELEASE )); then + RELEASE_TAGS_LIST+="\"latest\", " + fi + RELEASE_TAGS_LIST+="\"${BASE_IMAGE_REF#*:}\", " + if (( NEWER_IMAGE )); then + RELEASE_TAGS_LIST+="\"$NEXT_REV_TAG\"]" + else + RELEASE_TAGS_LIST+="\"$CURRENT_REV_TAG\"]" + fi + SUPPORTED_ROLLING_TAGS+=("${RELEASE_TAGS_LIST}") + fi + + echo "::endgroup::" +done + +# Built the output which base images have actually been rebuilt as JSON +REBUILT_IMAGES="[" +for IMAGE in "${REBUILT_BASE_IMAGES[@]}"; do + REBUILT_IMAGES+=" \"$IMAGE\" " +done +REBUILT_IMAGES+="]" +echo "rebuilt_base_images=${REBUILT_IMAGES// /, }" | tee -a "${GITHUB_OUTPUT}" + +# Built the supported rolling tags matrix as JSON +SUPPORTED_TAGS="{" +for (( i=0; i < ${#SUPPORTED_ROLLING_TAGS[@]} ; i++ )); do + j=$((i+1)) + SUPPORTED_TAGS+="\"${!j}\": ${SUPPORTED_ROLLING_TAGS[$i]}" + (( i < ${#SUPPORTED_ROLLING_TAGS[@]}-1 )) && SUPPORTED_TAGS+=", " +done +SUPPORTED_TAGS+="}" +echo "supported_tag_matrix=$SUPPORTED_TAGS" | tee -a "$GITHUB_OUTPUT" diff --git a/.github/workflows/scripts/utils.sh b/.github/workflows/scripts/utils.sh new file mode 100644 index 00000000000..987b58d8bb5 --- /dev/null +++ b/.github/workflows/scripts/utils.sh @@ -0,0 +1,108 @@ +#!/bin/bash + +set -euo pipefail + +function check_newer_parent() { + PARENT_IMAGE="$1" + 
# Get namespace, default to "library" if not found + PARENT_IMAGE_NS="${PARENT_IMAGE%/*}" + if [[ "$PARENT_IMAGE_NS" = "${PARENT_IMAGE}" ]]; then + PARENT_IMAGE_NS="library" + fi + PARENT_IMAGE_REPO="${PARENT_IMAGE%:*}" + PARENT_IMAGE_TAG="${PARENT_IMAGE#*:}" + + PARENT_IMAGE_LAST_UPDATE="$( curl -sS "https://hub.docker.com/v2/namespaces/${PARENT_IMAGE_NS}/repositories/${PARENT_IMAGE_REPO}/tags/${PARENT_IMAGE_TAG}" | jq -r .last_updated )" + if [[ "$PARENT_IMAGE_LAST_UPDATE" = "null" ]]; then + echo "::error title='Invalid PARENT Image'::Could not find ${PARENT_IMAGE} in the registry" + exit 1 + fi + + DERIVED_IMAGE="$2" + # Get namespace, default to "library" if not found + DERIVED_IMAGE_NS="${DERIVED_IMAGE%/*}" + if [[ "${DERIVED_IMAGE_NS}" = "${DERIVED_IMAGE}" ]]; then + DERIVED_IMAGE_NS="library" + fi + DERIVED_IMAGE_REPO="$( echo "${DERIVED_IMAGE%:*}" | cut -f2 -d/ )" + DERIVED_IMAGE_TAG="${DERIVED_IMAGE#*:}" + + DERIVED_IMAGE_LAST_UPDATE="$( curl -sS "https://hub.docker.com/v2/namespaces/${DERIVED_IMAGE_NS}/repositories/${DERIVED_IMAGE_REPO}/tags/${DERIVED_IMAGE_TAG}" | jq -r .last_updated )" + if [[ "$DERIVED_IMAGE_LAST_UPDATE" = "null" || "$DERIVED_IMAGE_LAST_UPDATE" < "$PARENT_IMAGE_LAST_UPDATE" ]]; then + echo "Parent image $PARENT_IMAGE has a newer release ($PARENT_IMAGE_LAST_UPDATE), which is more recent than $DERIVED_IMAGE ($DERIVED_IMAGE_LAST_UPDATE)" + return 0 + else + echo "Parent image $PARENT_IMAGE ($PARENT_IMAGE_LAST_UPDATE) is older than $DERIVED_IMAGE ($DERIVED_IMAGE_LAST_UPDATE)" + return 1 + fi +} + +function check_newer_pkgs() { + IMAGE="$1" + PKGS="$2" + + docker run --rm -u 0 "${IMAGE}" sh -c "apt update >/dev/null 2>&1 && apt install -s ${PKGS}" | tee /proc/self/fd/2 | grep -q "0 upgraded" + STATUS=$? 
+ + if [[ $STATUS -eq 0 ]]; then + echo "Base image $IMAGE has no updates for our custom installed packages" + return 1 + else + echo "Base image $IMAGE needs updates for our custom installed packages" + return 0 + fi + + # TODO: In a future version of this script, we might want to include checking for other security updates, + # not just updates to the packages we installed. + # grep security /etc/apt/sources.list > /tmp/security.list + # apt-get update -oDir::Etc::Sourcelist=/tmp/security.list + # apt-get dist-upgrade -y -oDir::Etc::Sourcelist=/tmp/security.list -oDir::Etc::SourceParts=/bin/false -s + +} + +function current_revision() { + IMAGE="$1" + IMAGE_NS_REPO="${IMAGE%:*}" + IMAGE_TAG="${IMAGE#*:}" + + if [[ "$IMAGE_TAG" = "$IMAGE_NS_REPO" ]]; then + >&2 echo "You must provide an image reference in the format [/]:" + exit 1 + fi + + case "$IMAGE_NS_REPO" in + */*) :;; # namespace/repository syntax, leave as is + *) IMAGE_NS_REPO="library/$IMAGE_NS_REPO";; # bare repository name (docker official image); must convert to namespace/repository syntax + esac + + # Without such a token we may run into rate limits + # OB 2024-09-16: for some reason using this token stopped working. Let's go without and see if we really fall into rate limits. + # token=$( curl -s "https://auth.docker.io/token?service=registry.docker.io&scope=repository:$IMAGE_NS_REPO:pull" ) + + ALL_TAGS="$( + i=0 + while [ $? == 0 ]; do + i=$((i+1)) + # OB 2024-09-16: for some reason using this token stopped working. Let's go without and see if we really fall into rate limits. + # RESULT=$( curl -s -H "Authorization: Bearer $token" "https://registry.hub.docker.com/v2/repositories/$IMAGE_NS_REPO/tags/?page=$i&page_size=100" ) + RESULT=$( curl -s "https://registry.hub.docker.com/v2/repositories/$IMAGE_NS_REPO/tags/?page=$i&page_size=100" ) + if [[ $( echo "$RESULT" | jq '.message' ) != "null" ]]; then + # If we run into an error on the first attempt, that means we have a problem. 
+ if [[ "$i" == "1" ]]; then + >&2 echo "Error when retrieving tag data: $( echo "$RESULT" | jq '.message' )" + exit 2 + # Otherwise it will just mean we reached the last page already + else + break + fi + else + echo "$RESULT" | jq -r '."results"[]["name"]' + # DEBUG: + #echo "$RESULT" | >&2 jq -r '."results"[]["name"]' + fi + done + )" + + # Note: if a former tag could not be found, it just might not exist already. Start new series with rev 0 + echo "$ALL_TAGS" | grep "${IMAGE_TAG}-r" | sed -e "s#${IMAGE_TAG}-r##" | sort -h | tail -n1 || echo "-1" +} diff --git a/.github/workflows/shellspec.yml b/.github/workflows/shellspec.yml index 227a74fa00f..3320d9d08a4 100644 --- a/.github/workflows/shellspec.yml +++ b/.github/workflows/shellspec.yml @@ -24,28 +24,11 @@ jobs: run: | cd tests/shell shellspec - shellspec-centos7: - name: "CentOS 7" + shellspec-rocky9: + name: "RockyLinux 9" runs-on: ubuntu-latest container: - image: centos:7 - steps: - - uses: actions/checkout@v2 - - name: Install shellspec - run: | - curl -fsSL https://github.com/shellspec/shellspec/releases/download/${{ env.SHELLSPEC_VERSION }}/shellspec-dist.tar.gz | tar -xz -C /usr/share - ln -s /usr/share/shellspec/shellspec /usr/bin/shellspec - - name: Install dependencies - run: yum install -y ed - - name: Run shellspec - run: | - cd tests/shell - shellspec - shellspec-rocky8: - name: "RockyLinux 8" - runs-on: ubuntu-latest - container: - image: rockylinux/rockylinux:8 + image: rockylinux/rockylinux:9 steps: - uses: actions/checkout@v2 - name: Install shellspec diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1430ba951a6..4fa6e955b70 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,67 +1,7 @@ # Contributing to Dataverse -Thank you for your interest in contributing to Dataverse! We are open to contributions from everyone. You don't need permission to participate. Just jump in. If you have questions, please reach out using one or more of the channels described below. 
+Thank you for your interest in contributing to Dataverse! We are open to contributions from everyone. -We aren't just looking for developers. There are many ways to contribute to Dataverse. We welcome contributions of ideas, bug reports, usability research/feedback, documentation, code, and more! +Please see our [Contributor Guide][] for how you can help! -## Ideas/Feature Requests - -Your idea or feature request might already be captured in the Dataverse [issue tracker] on GitHub but if not, the best way to bring it to the community's attention is by posting on the [dataverse-community Google Group][] or bringing it up on a [Community Call][]. You're also welcome to make some noise in [chat.dataverse.org][] or cram your idea into 280 characters and mention [@dataverseorg][] on Twitter. To discuss your idea privately, please email it to support@dataverse.org - -There's a chance your idea is already on our roadmap, which is available at https://www.iq.harvard.edu/roadmap-dataverse-project - -[chat.dataverse.org]: http://chat.dataverse.org -[issue tracker]: https://github.com/IQSS/dataverse/issues -[@dataverseorg]: https://twitter.com/dataverseorg - -## Usability testing - -Please email us at support@dataverse.org if you are interested in participating in usability testing. - -## Bug Reports/Issues - -An issue is a bug (a feature is no longer behaving the way it should) or a feature (something new to Dataverse that helps users complete tasks). You can browse the Dataverse [issue tracker] on GitHub by open or closed issues or by milestones. - -Before submitting an issue, please search the existing issues by using the search bar at the top of the page. If there is an existing open issue that matches the issue you want to report, please add a comment to it. 
- -If there is no pre-existing issue or it has been closed, please click on the "New Issue" button, log in, and write in what the issue is (unless it is a security issue which should be reported privately to security@dataverse.org). - -If you do not receive a reply to your new issue or comment in a timely manner, please email support@dataverse.org with a link to the issue. - -### Writing an Issue - -For the subject of an issue, please start it by writing the feature or functionality it relates to, i.e. "Create Account:..." or "Dataset Page:...". In the body of the issue, please outline the issue you are reporting with as much detail as possible. In order for the Dataverse development team to best respond to the issue, we need as much information about the issue as you can provide. Include steps to reproduce bugs. Indicate which version you're using, which is shown at the bottom of the page. We love screenshots! - -### Issue Attachments - -You can attach certain files (images, screenshots, logs, etc.) by dragging and dropping, selecting them, or pasting from the clipboard. Files must be one of GitHub's [supported attachment formats] such as png, gif, jpg, txt, pdf, zip, etc. (Pro tip: A file ending in .log can be renamed to .txt so you can upload it.) If there's no easy way to attach your file, please include a URL that points to the file in question. - -[supported attachment formats]: https://help.github.com/articles/file-attachments-on-issues-and-pull-requests/ - -## Documentation - -The source for the documentation at http://guides.dataverse.org/en/latest/ is in the GitHub repo under the "[doc][]" folder. If you find a typo or inaccuracy or something to clarify, please send us a pull request! For more on the tools used to write docs, please see the [documentation][] section of the Developer Guide. 
- -[doc]: https://github.com/IQSS/dataverse/tree/develop/doc/sphinx-guides/source -[documentation]: http://guides.dataverse.org/en/latest/developers/documentation.html - -## Code/Pull Requests - -We love code contributions. Developers are not limited to the main Dataverse code in this git repo. You can help with API client libraries in your favorite language that are mentioned in the [API Guide][] or create a new library. You can help work on configuration management code that's mentioned in the [Installation Guide][]. The Installation Guide also covers a relatively new concept called "external tools" that allows developers to create their own tools that are available from within an installation of Dataverse. - -[API Guide]: http://guides.dataverse.org/en/latest/api -[Installation Guide]: http://guides.dataverse.org/en/latest/installation - -If you are interested in working on the main Dataverse code, great! Before you start coding, please reach out to us either on the [dataverse-community Google Group][], the [dataverse-dev Google Group][], [chat.dataverse.org][], or via support@dataverse.org to make sure the effort is well coordinated and we avoid merge conflicts. We maintain a list of [community contributors][] and [dev efforts][] the community is working on so please let us know if you'd like to be added or removed from either list. - -Please read http://guides.dataverse.org/en/latest/developers/version-control.html to understand how we use the "git flow" model of development and how we will encourage you to create a GitHub issue (if it doesn't exist already) to associate with your pull request. That page also includes tips on making a pull request. - -After making your pull request, your goal should be to help it advance through our kanban board at https://github.com/orgs/IQSS/projects/34 . If no one has moved your pull request to the code review column in a timely manner, please reach out. 
Note that once a pull request is created for an issue, we'll remove the issue from the board so that we only track one card (the pull request). - -Thanks for your contribution! - -[dataverse-community Google Group]: https://groups.google.com/group/dataverse-community -[Community Call]: https://dataverse.org/community-calls -[dataverse-dev Google Group]: https://groups.google.com/group/dataverse-dev -[community contributors]: https://docs.google.com/spreadsheets/d/1o9DD-MQ0WkrYaEFTD5rF_NtyL8aUISgURsAXSL7Budk/edit?usp=sharing -[dev efforts]: https://github.com/orgs/IQSS/projects/34/views/6 +[Contributor Guide]: https://guides.dataverse.org/en/latest/contributor/index.html diff --git a/conf/solr/schema.xml b/conf/solr/schema.xml index 5dde750573d..2aed50e9998 100644 --- a/conf/solr/schema.xml +++ b/conf/solr/schema.xml @@ -142,6 +142,7 @@ + @@ -205,6 +206,7 @@ + @@ -350,6 +352,7 @@ + @@ -426,6 +429,7 @@ + @@ -590,6 +594,7 @@ + diff --git a/doc/release-notes/10015-RO-Crate-metadata-file.md b/doc/release-notes/10015-RO-Crate-metadata-file.md deleted file mode 100644 index 4b018a634f7..00000000000 --- a/doc/release-notes/10015-RO-Crate-metadata-file.md +++ /dev/null @@ -1,10 +0,0 @@ -Detection of mime-types based on a filename with extension and detection of the RO-Crate metadata files. - -From now on, filenames with extensions can be added into `MimeTypeDetectionByFileName.properties` file. Filenames added there will take precedence over simply recognizing files by extensions. 
For example, two new filenames are added into that file: -``` -ro-crate-metadata.json=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" -ro-crate-metadata.jsonld=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" -``` - -Therefore, files named `ro-crate-metadata.json` will be then detected as RO-Crated metadata files from now on, instead as generic `JSON` files. -For more information on the RO-Crate specifications, see https://www.researchobject.org/ro-crate diff --git a/doc/release-notes/10022_upload_redirect_without_tagging.md b/doc/release-notes/10022_upload_redirect_without_tagging.md deleted file mode 100644 index 7ff17f08f4c..00000000000 --- a/doc/release-notes/10022_upload_redirect_without_tagging.md +++ /dev/null @@ -1,5 +0,0 @@ -If your S3 store does not support tagging and gives an error if you configure direct uploads, you can disable the tagging by using the ``dataverse.files..disable-tagging`` JVM option. For more details see https://dataverse-guide--10029.org.readthedocs.build/en/10029/developers/big-data-support.html#s3-tags #10022 and #10029. - -## New config options - -- dataverse.files..disable-tagging diff --git a/doc/release-notes/10116-incomplete-metadata-label-setting.md b/doc/release-notes/10116-incomplete-metadata-label-setting.md deleted file mode 100644 index 769100c3804..00000000000 --- a/doc/release-notes/10116-incomplete-metadata-label-setting.md +++ /dev/null @@ -1 +0,0 @@ -Bug fixed for the ``incomplete metadata`` label being shown for published dataset with incomplete metadata in certain scenarios. This label will now be shown for draft versions of such datasets and published datasets that the user can edit. 
This label can also be made invisible for published datasets (regardless of edit rights) with the new option ``dataverse.ui.show-validity-label-when-published`` set to `false`. diff --git a/doc/release-notes/10137-2-add-disable-reason-flag.md b/doc/release-notes/10137-2-add-disable-reason-flag.md deleted file mode 100644 index ee5257466ee..00000000000 --- a/doc/release-notes/10137-2-add-disable-reason-flag.md +++ /dev/null @@ -1,6 +0,0 @@ -## Release Highlights - -### Feature flag to remove the required "reason" field in the "Return To Author" dialog - -A reason field, that is required to not be empty, was added in v6.2. Installations that handle author communications through email or another system may prefer to not be required to use this new field. v6.2 includes a new -disable-return-to-author-reason feature flag that can be enabled to drop the reason field from the dialog and make sending a reason optional in the api/datasets/{id}/returnToAuthor call. diff --git a/doc/release-notes/10236-openapi-definition-endpoint.md b/doc/release-notes/10236-openapi-definition-endpoint.md deleted file mode 100644 index 60492c29d78..00000000000 --- a/doc/release-notes/10236-openapi-definition-endpoint.md +++ /dev/null @@ -1,8 +0,0 @@ -In Dataverse 6.0 Payara was updated, which caused the url `/openapi` to stop working: - -- https://github.com/IQSS/dataverse/issues/9981 -- https://github.com/payara/Payara/issues/6369 - -When it worked in Dataverse 5.x, the `/openapi` output was generated automatically by Payara, but in this release we have switched to OpenAPI output produced by the [SmallRye OpenAPI plugin](https://github.com/smallrye/smallrye-open-api/tree/main/tools/maven-plugin). This gives us finer control over the output. - -For more information, see the section on [OpenAPI](https://dataverse-guide--10328.org.readthedocs.build/en/10328/api/getting-started.html#openapi) in the API Guide. 
diff --git a/doc/release-notes/10242-add-feature-dv-api b/doc/release-notes/10242-add-feature-dv-api deleted file mode 100644 index 5c786554ff9..00000000000 --- a/doc/release-notes/10242-add-feature-dv-api +++ /dev/null @@ -1 +0,0 @@ -New api endpoints have been added to allow you to add or remove featured collections from a dataverse collection. diff --git a/doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md b/doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md deleted file mode 100644 index eb3a79dbf25..00000000000 --- a/doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md +++ /dev/null @@ -1,53 +0,0 @@ -### New keywordTermURI Metadata in keyword Metadata Block - -Adding a new metadata `keywordTermURI` to the `keyword` metadata block to facilitate the integration of controlled vocabulary services, in particular by adding the possibility of saving the "term" and its associated URI. For more information, see #10288 and PR #10371. - -## Upgrade Instructions - -1\. Update the Citation metadata block - -- `wget https://github.com/IQSS/dataverse/releases/download/v6.3/citation.tsv` -- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` - -2\. Update your Solr `schema.xml` to include the new field. - - For details, please see https://guides.dataverse.org/en/latest/admin/metadatacustomization.html#updating-the-solr-schema - - -3\. Reindex Solr. - - Once the schema.xml is updated, Solr must be restarted and a reindex initiated. - For details, see https://guides.dataverse.org/en/latest/admin/solr-search-index.html but here is the reindex command: - - `curl http://localhost:8080/api/admin/index` - - -4\. Run ReExportAll to update dataset metadata exports. Follow the instructions in the [Metadata Export of Admin Guide](https://guides.dataverse.org/en/latest/admin/metadataexport.html#batch-exports-through-the-api). 
- - -## Notes for Dataverse Installation Administrators - -### Data migration to the new `keywordTermURI` field - -You can migrate your `keywordValue` data containing URIs to the new `keywordTermURI` field. -In case of data migration, view the affected data with the following database query: - -``` -SELECT value FROM datasetfieldvalue dfv -INNER JOIN datasetfield df ON df.id = dfv.datasetfield_id -WHERE df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') -AND value ILIKE 'http%'; -``` - -If you wish to migrate your data, a database update is then necessary: - -``` -UPDATE datasetfield df -SET datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordTermURI') -FROM datasetfieldvalue dfv -WHERE dfv.datasetfield_id = df.id -AND df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') -AND dfv.value ILIKE 'http%'; -``` - -A ['Reindex in Place'](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) will be required and ReExportAll will need to be run to update the metadata exports of the dataset. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/latest/admin/metadataexport.html#batch-exports-through-the-api). \ No newline at end of file diff --git a/doc/release-notes/10316_cvoc_http_headers.md b/doc/release-notes/10316_cvoc_http_headers.md deleted file mode 100644 index 4b557383a2e..00000000000 --- a/doc/release-notes/10316_cvoc_http_headers.md +++ /dev/null @@ -1,5 +0,0 @@ -You are now able to add HTTP request headers required by the External Vocabulary Services you are implementing. - -A combined documentation can be found on pull request [#10404](https://github.com/IQSS/dataverse/pull/10404). - -For more information, see issue [#10316](https://github.com/IQSS/dataverse/issues/10316) and pull request [gddc/dataverse-external-vocab-support#19](https://github.com/gdcc/dataverse-external-vocab-support/pull/19). 
diff --git a/doc/release-notes/10330-api-change-latest-version-status.md b/doc/release-notes/10330-api-change-latest-version-status.md deleted file mode 100644 index 6e6a018fe12..00000000000 --- a/doc/release-notes/10330-api-change-latest-version-status.md +++ /dev/null @@ -1 +0,0 @@ -The API endpoint for getting the Dataset version has been extended to include latestVersionPublishingStatus. \ No newline at end of file diff --git a/doc/release-notes/10339-workflow.md b/doc/release-notes/10339-workflow.md deleted file mode 100644 index 90d08dabb1f..00000000000 --- a/doc/release-notes/10339-workflow.md +++ /dev/null @@ -1,3 +0,0 @@ -The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. - -Release notes should include the usual instructions, for those who have installed this optional block, to update the computational_workflow block. (PR#10441) \ No newline at end of file diff --git a/doc/release-notes/10389-metadatablocks-api-extension.md b/doc/release-notes/10389-metadatablocks-api-extension.md deleted file mode 100644 index 9b14100d33c..00000000000 --- a/doc/release-notes/10389-metadatablocks-api-extension.md +++ /dev/null @@ -1,6 +0,0 @@ -New optional query parameters added to ``api/metadatablocks`` and ``api/dataverses/{id}/metadatablocks`` endpoints: - -- ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. -- ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. If not set, the default value is false. - -Added new ``displayOnCreate`` field to the MetadataBlock and DatasetFieldType payloads. 
diff --git a/doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md b/doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md deleted file mode 100644 index e8840e9d4f7..00000000000 --- a/doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md +++ /dev/null @@ -1,4 +0,0 @@ -For scenarios involving API calls related to large datasets (Numerous files, for example: ~10k) it has been optimized: - -- The search API endpoint. -- The permission checking logic present in PermissionServiceBean. diff --git a/doc/release-notes/10425-add-MIT-License.md b/doc/release-notes/10425-add-MIT-License.md deleted file mode 100644 index 95d6fb38ded..00000000000 --- a/doc/release-notes/10425-add-MIT-License.md +++ /dev/null @@ -1,3 +0,0 @@ -A new file has been added to import the MIT License to Dataverse: licenseMIT.json. - -Documentation has been added to explain the procedure for adding new licenses to the guides. diff --git a/doc/release-notes/10464-add-name-harvesting-client-facet.md b/doc/release-notes/10464-add-name-harvesting-client-facet.md deleted file mode 100644 index 1fc0bb47caf..00000000000 --- a/doc/release-notes/10464-add-name-harvesting-client-facet.md +++ /dev/null @@ -1,3 +0,0 @@ -The Metadata Source facet has been updated to show the name of the harvesting client rather than grouping all such datasets under 'harvested' - -TODO: for the v6.13 release note: Please add a full re-index using http://localhost:8080/api/admin/index to the upgrade instructions. diff --git a/doc/release-notes/10466-math-challenge-403-error-page.md b/doc/release-notes/10466-math-challenge-403-error-page.md deleted file mode 100644 index 160c760dc9d..00000000000 --- a/doc/release-notes/10466-math-challenge-403-error-page.md +++ /dev/null @@ -1 +0,0 @@ -On forbidden access error page, also know as 403 error page, the math challenge is now correctly display to submit the contact form. 
diff --git a/doc/release-notes/10468-doc-datalad-integration.md b/doc/release-notes/10468-doc-datalad-integration.md deleted file mode 100644 index cd4d2d53a5f..00000000000 --- a/doc/release-notes/10468-doc-datalad-integration.md +++ /dev/null @@ -1 +0,0 @@ -DataLad has been integrated with Dataverse. For more information, see https://dataverse-guide--10470.org.readthedocs.build/en/10470/admin/integrations.html#datalad diff --git a/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md b/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md deleted file mode 100644 index 77cc7f59773..00000000000 --- a/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md +++ /dev/null @@ -1,3 +0,0 @@ -Changed ``api/dataverses/{id}/metadatablocks`` so that setting the query parameter ``onlyDisplayedOnCreate=true`` also returns metadata blocks with dataset field type input levels configured as required on the General Information page of the collection, in addition to the metadata blocks and their fields with the property ``displayOnCreate=true`` (which was the original behavior). - -A new endpoint ``api/dataverses/{id}/inputLevels`` has been created for updating the dataset field type input levels of a collection via API. diff --git a/doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md b/doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md deleted file mode 100644 index 5293c7267d0..00000000000 --- a/doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md +++ /dev/null @@ -1,22 +0,0 @@ -The Dataverse object returned by /api/dataverses has been extended to include "isReleased": {boolean}. 
-```javascript -{ - "status": "OK", - "data": { - "id": 32, - "alias": "dv6f645bb5", - "name": "dv6f645bb5", - "dataverseContacts": [ - { - "displayOrder": 0, - "contactEmail": "54180268@mailinator.com" - } - ], - "permissionRoot": true, - "dataverseType": "UNCATEGORIZED", - "ownerId": 1, - "creationDate": "2024-04-12T18:05:59Z", - "isReleased": true - } -} -``` \ No newline at end of file diff --git a/doc/release-notes/10494-payara-upgrade.md b/doc/release-notes/10494-payara-upgrade.md deleted file mode 100644 index 23ee0e698f7..00000000000 --- a/doc/release-notes/10494-payara-upgrade.md +++ /dev/null @@ -1,119 +0,0 @@ -# Upgrade Payara to v6.2024.6 - -With this version of Dataverse, we encourage you to upgrade to version 6.2024.6. -This will address security issues accumulated since the release of 6.2023.8, which was required since Dataverse release 6.0. - -## Instructions for Upgrading - -If you are using GDCC containers, this upgrade is included when pulling new release images. -No manual intervention is necessary. - -We recommend you ensure you followed all update instructions from the past releases regarding Payara. -(Latest Payara update was for [v6.0](https://github.com/IQSS/dataverse/releases/tag/v6.0)) - -Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. - -The steps below are a simple matter of reusing your existing domain directory with the new distribution. -But we also recommend that you review the Payara upgrade instructions as it could be helpful during any troubleshooting: -[Payara Release Notes](https://docs.payara.fish/community/docs/Release%20Notes/Release%20Notes%206.2024.6.html) -We assume you are already on a Dataverse 6.x installation, using a Payara 6.x release. - -```shell -export PAYARA=/usr/local/payara6 -``` - -(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) - -1\. 
Undeploy the previous version - -```shell - $PAYARA/bin/asadmin list-applications - $PAYARA/bin/asadmin undeploy dataverse<-version> -``` - -2\. Stop Payara - -```shell - service payara stop - rm -rf $PAYARA/glassfish/domains/domain1/generated - rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache - rm -rf $PAYARA/glassfish/domains/domain1/lib/databases -``` - -3\. Move the current Payara directory out of the way - -```shell - mv $PAYARA $PAYARA.MOVED -``` - -4\. Download the new Payara version (6.2024.6), and unzip it in its place - -5\. Replace the brand new payara/glassfish/domains/domain1 with your old, preserved domain1 - -6\. Make sure that you have the following `--add-opens` options in your domain.xml. If not present, add them: - -```diff ---- payara-6.2023.8/glassfish/domains/domain1/config/domain.xml -+++ payara-6.2024.6/glassfish/domains/domain1/config/domain.xml -@@ -212,12 +212,16 @@ - --add-opens=java.naming/javax.naming.spi=ALL-UNNAMED - --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED - --add-opens=java.logging/java.util.logging=ALL-UNNAMED -+ --add-opens=java.management/javax.management=ALL-UNNAMED -+ --add-opens=java.management/javax.management.openmbean=ALL-UNNAMED - [17|]--add-exports=java.base/sun.net.www=ALL-UNNAMED - [17|]--add-exports=java.base/sun.security.util=ALL-UNNAMED - [17|]--add-opens=java.base/java.lang.invoke=ALL-UNNAMED - [17|]--add-opens=java.desktop/java.beans=ALL-UNNAMED - [17|]--add-exports=jdk.naming.dns/com.sun.jndi.dns=ALL-UNNAMED - [17|]--add-exports=java.naming/com.sun.jndi.ldap=ALL-UNNAMED -+ [17|]--add-opens=java.base/java.io=ALL-UNNAMED -+ [21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED - -Xmx512m - -XX:NewRatio=2 - -XX:+UnlockDiagnosticVMOptions -@@ -447,12 +451,16 @@ - --add-opens=java.naming/javax.naming.spi=ALL-UNNAMED - --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED - --add-opens=java.logging/java.util.logging=ALL-UNNAMED -+ --add-opens=java.management/javax.management=ALL-UNNAMED -+ 
--add-opens=java.management/javax.management.openmbean=ALL-UNNAMED - [17|]--add-exports=java.base/sun.net.www=ALL-UNNAMED - [17|]--add-exports=java.base/sun.security.util=ALL-UNNAMED - [17|]--add-opens=java.base/java.lang.invoke=ALL-UNNAMED - [17|]--add-opens=java.desktop/java.beans=ALL-UNNAMED - [17|]--add-exports=jdk.naming.dns/com.sun.jndi.dns=ALL-UNNAMED - [17|]--add-exports=java.naming/com.sun.jndi.ldap=ALL-UNNAMED -+ [17|]--add-opens=java.base/java.io=ALL-UNNAMED -+ [21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED - -Xmx512m - -XX:NewRatio=2 - -XX:+UnlockDiagnosticVMOptions -``` -(You can also save this as a patch file and try to apply it.) - -TODO: For the combined 6.3 release note, I would consider replacing the patch format above with just the 4 specific options, for clarity etc. (L.A.) As in: -``` - --add-opens=java.management/javax.management=ALL-UNNAMED - --add-opens=java.management/javax.management.openmbean=ALL-UNNAMED - [17|]--add-opens=java.base/java.io=ALL-UNNAMED - [21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED -``` - -7\. Start Payara - -```shell - service payara start -``` - -8\. Deploy this version. - -```shell - $PAYARA/bin/asadmin deploy dataverse-6.3.war -``` - -9\. Restart payara - -```shell - service payara stop - service payara start diff --git a/doc/release-notes/10503-cvoc-hidden-html-fields.md b/doc/release-notes/10503-cvoc-hidden-html-fields.md deleted file mode 100644 index e3ea0463fb8..00000000000 --- a/doc/release-notes/10503-cvoc-hidden-html-fields.md +++ /dev/null @@ -1,11 +0,0 @@ -## Release Highlights - -### Updates on Support for External Vocabulary Services - -#### Hidden HTML Fields - -External Controlled Vocabulary scripts, configured via [:CVocConf](https://guides.dataverse.org/en/6.3/installation/config.html#cvocconf), can now access the values of managed fields as well as the term-uri-field for use in constructing the metadata view for a dataset. 
- -Those values are hidden and can be found with the html attribute `data-cvoc-metadata-name`. - -For more information, see [#10503](https://github.com/IQSS/dataverse/pull/10503). diff --git a/doc/release-notes/10547-solr-updates.md b/doc/release-notes/10547-solr-updates.md deleted file mode 100644 index a21809c6369..00000000000 --- a/doc/release-notes/10547-solr-updates.md +++ /dev/null @@ -1 +0,0 @@ -Multiple improvements have ben made to they way Solr indexing and searching is done. Response times should be significantly improved. See the individual PRs in this release for details. \ No newline at end of file diff --git a/doc/release-notes/10554-avoid-solr-join-guest.md b/doc/release-notes/10554-avoid-solr-join-guest.md deleted file mode 100644 index 956c658dbed..00000000000 --- a/doc/release-notes/10554-avoid-solr-join-guest.md +++ /dev/null @@ -1,5 +0,0 @@ -Two experimental features flag called "add-publicobject-solr-field" and "avoid-expensive-solr-join" have been added to change how Solr documents are indexed for public objects and how Solr queries are constructed to accommodate access to restricted content (drafts, etc.). It is hoped that it will help with performance, especially on large instances and under load. - -Before the search feature flag ("avoid-expensive...") can be turned on, the indexing flag must be enabled, and a full reindex performed. Otherwise publicly available objects are NOT going to be shown in search results. - -For details see https://dataverse-guide--10555.org.readthedocs.build/en/10555/installation/config.html#feature-flags and #10555. 
diff --git a/doc/release-notes/10561-3dviewer.md b/doc/release-notes/10561-3dviewer.md deleted file mode 100644 index 47da10f8837..00000000000 --- a/doc/release-notes/10561-3dviewer.md +++ /dev/null @@ -1 +0,0 @@ -3DViewer by openforestdata.pl has been added to the list of external tools: https://preview.guides.gdcc.io/en/develop/admin/external-tools.html#inventory-of-external-tools diff --git a/doc/release-notes/10565-banner-test-improvements.md b/doc/release-notes/10565-banner-test-improvements.md deleted file mode 100644 index d9030f2a0c3..00000000000 --- a/doc/release-notes/10565-banner-test-improvements.md +++ /dev/null @@ -1 +0,0 @@ -The endpoint `api/admin/bannerMessage` has been extended so the ID is returned when created \ No newline at end of file diff --git a/doc/release-notes/10568-Fix File Reingest.md b/doc/release-notes/10568-Fix File Reingest.md deleted file mode 100644 index 354aa847f01..00000000000 --- a/doc/release-notes/10568-Fix File Reingest.md +++ /dev/null @@ -1 +0,0 @@ -A bug that prevented the Ingest option in the File page Edit File menu from working has been fixed \ No newline at end of file diff --git a/doc/release-notes/10579-avoid-solr-deletes.md b/doc/release-notes/10579-avoid-solr-deletes.md deleted file mode 100644 index 1062a2fb78f..00000000000 --- a/doc/release-notes/10579-avoid-solr-deletes.md +++ /dev/null @@ -1,9 +0,0 @@ -A features flag called "reduce-solr-deletes" has been added to improve how datafiles are indexed. When the flag is enabled, -Dataverse wil avoid pre-emptively deleting existing solr documents for the files prior to sending updated information. This -should improve performance and will allow additional optimizations going forward. 
- -The /api/admin/index/status and /api/admin/index/clear-orphans calls -(see https://guides.dataverse.org/en/latest/admin/solr-search-index.html#index-and-database-consistency) -will now find and remove (respectively) additional permissions related solr documents that were not being detected before. -Reducing the overall number of documents will improve solr performance and large sites may wish to periodically call the -clear-orphans API. \ No newline at end of file diff --git a/doc/release-notes/10611-harvested-origin-facet.md b/doc/release-notes/10611-harvested-origin-facet.md deleted file mode 100644 index 89ab6eb7639..00000000000 --- a/doc/release-notes/10611-harvested-origin-facet.md +++ /dev/null @@ -1,10 +0,0 @@ -NOTE that this release note supercedes the 10464-add-name-harvesting-client-facet.md note from the PR 10464. - -An option has been added to index the name of the Harvesting Client as the "Metadata Source" of harvested datasets and files; if enabled, the Metadata Source facet will be showing separate entries for the content harvested from different sources, instead of the current, default behavior where there is one "Harvested" facet for all such content. - - -TODO: for the v6.3 release note: -If you choose to enable the extended "Metadata Souce" facet for harvested content, set the optional feature flage (jvm option) `dataverse.feature.index-harvested-metadata-source=true` before reindexing. 
- -[Please note that the upgrade instruction in 6.3 will contain a suggestion to run full reindex, as part of the Solr upgrade, so the sentence above will need to be added to that section] - diff --git a/doc/release-notes/10697-improve-permission-indexing.md b/doc/release-notes/10697-improve-permission-indexing.md new file mode 100644 index 00000000000..b232b1c4d3c --- /dev/null +++ b/doc/release-notes/10697-improve-permission-indexing.md @@ -0,0 +1,7 @@ +### Reindexing after a role assignment is less memory intensive + +Adding/removing a user from a role on a collection, particularly the root collection, could lead to a significant increase in memory use resulting in Dataverse itself failing with an out-of-memory condition. Such changes now consume much less memory. + +If you have experienced out-of-memory failures in Dataverse in the past that could have been caused by this problem, you may wish to run a [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) to update any out-of-date information. + +For more information, see #10697 and #10698. diff --git a/doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md b/doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md new file mode 100644 index 00000000000..b450867c630 --- /dev/null +++ b/doc/release-notes/10857-add-expiration-date-to-recreate-token-api.md @@ -0,0 +1 @@ +An optional query parameter called 'returnExpiration' has been added to the 'users/token/recreate' endpoint, which, if set to true, returns the expiration time in the response message. diff --git a/doc/release-notes/10901deaccessioned file edit fix.md b/doc/release-notes/10901deaccessioned file edit fix.md new file mode 100644 index 00000000000..db12b1fc978 --- /dev/null +++ b/doc/release-notes/10901deaccessioned file edit fix.md @@ -0,0 +1 @@ +When a dataset was deaccessioned and was the only previous version it will cause an error when trying to update the files. 
\ No newline at end of file diff --git a/doc/release-notes/10939-i18n-docker.md b/doc/release-notes/10939-i18n-docker.md new file mode 100644 index 00000000000..d9887b684db --- /dev/null +++ b/doc/release-notes/10939-i18n-docker.md @@ -0,0 +1,5 @@ +## Multiple Language in Docker + +Configuration and documentation has been added to explain how to set up multiple languages (e.g. English and French) in the tutorial for setting up Dataverse in Docker. + +See also #10939 diff --git a/doc/release-notes/5621_dataset image in header.md b/doc/release-notes/5621_dataset image in header.md deleted file mode 100644 index 34b445fd9e1..00000000000 --- a/doc/release-notes/5621_dataset image in header.md +++ /dev/null @@ -1 +0,0 @@ -Dataverse will use the Dataset thumbnail, if one is defined, rather than the generic Dataverse logo in the Open Graph metadata header. This means the image will be seen when, for example, the dataset is referenced in Facebook. diff --git a/doc/release-notes/6.3-release-notes.md b/doc/release-notes/6.3-release-notes.md new file mode 100644 index 00000000000..6cad513690d --- /dev/null +++ b/doc/release-notes/6.3-release-notes.md @@ -0,0 +1,470 @@ +# Dataverse 6.3 + +Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.3 rather than the list of releases, which will cut them off. + +This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. 
+ +# Table of Contents +- [Release Highlights](#release-highlights) +- [Features](#features) +- [Bug Fixes](#bug-fixes) +- [API](#api) +- [Settings](#settings) +- [Complete List of Changes](#complete-list-of-changes) +- [Getting Help](#getting-help) +- [Upgrade instructions](#upgrade-instructions) + +## Release Highlights + +### Solr Search and Indexing Improvements + +Multiple improvements have been made to the way Solr indexing and searching is done. Response times should be significantly improved. + +- Two experimental features flag called "add-publicobject-solr-field" and "avoid-expensive-solr-join" have been added to change how Solr documents are indexed for public objects and how Solr queries are constructed to accommodate access to restricted content (drafts, etc.). It is hoped that it will help with performance, especially on large instances and under load. + +- Before the search feature flag ("avoid-expensive...") can be turned on, the indexing flag must be enabled, and a full reindex performed. Otherwise publicly available objects are NOT going to be shown in search results. + +- A feature flag called "reduce-solr-deletes" has been added to improve how datafiles are indexed. When the flag is enabled, Dataverse will avoid pre-emptively deleting existing Solr documents for the files prior to sending updated information. This +should improve performance and will allow additional optimizations going forward. + +- The /api/admin/index/status and /api/admin/index/clear-orphans calls +(see https://guides.dataverse.org/en/latest/admin/solr-search-index.html#index-and-database-consistency) +will now find and remove (respectively) additional permissions related Solr documents that were not being detected before. +Reducing the overall number of documents will improve Solr performance and large sites may wish to periodically call the "clear-orphans" API. 
+ +- Dataverse now relies on the autoCommit and autoSoftCommit settings in the Solr configuration instead of explicitly committing documents to the Solr index. This improves indexing speed. + +See also #10554, #10654, and #10579. + +### File Retention Period + +Dataverse now supports file-level retention periods. The ability to set retention periods, with a minimum duration (in months), can be configured by a Dataverse installation administrator. For more information, see the [Retention Periods section](https://guides.dataverse.org/en/6.3/user/dataset-management.html#retention-periods) of the User Guide. + +- Users can configure a specific retention period, defined by an end date and a short reason, on a set of selected files or an individual file, by selecting the "Retention Period" menu item and entering information in a popup dialog. Retention periods can only be set, changed, or removed before a file has been published. After publication, only Dataverse installation administrators can make changes, using an API. + +- After the retention period expires, files can not be previewed or downloaded (as if restricted, with no option to allow access requests). The file (landing) page and all the metadata remains available. + +## Features + +### Large Datasets Improvements + +For scenarios involving API calls related to large datasets (numerous files, for example: ~10k) the following have been been optimized: + +- The Search API endpoint. +- The permission checking logic present in PermissionServiceBean. + +See also [#10415](https://github.com/IQSS/dataverse/pull/10415). + +### Improved Controlled Vocabulary for Citation Block + +The Controlled Vocabuary Values list for the "Language" metadata field in the citation block has been improved, with some missing two- and three-letter ISO 639 codes added, as well as more alternative names for some of the languages, making all these extra language identifiers importable. 
See also [#8243](https://github.com/IQSS/dataverse/pull/8243). + +### Updates on Support for External Vocabulary Services + +Multiple extensions of the external vocabulary mechanism have been added. These extensions allow interaction with services based on the Ontoportal software and are expected to be generally useful for other service types. + +These changes include: + +- *Improved Indexing with Compound Fields:* When using an external vocabulary service with compound fields, you can now specify which field(s) will include additional indexed information, such as translations of an entry into other languages. This is done by adding the `indexIn` in `retrieval-filtering`. See also [#10505](https://github.com/IQSS/dataverse/pull/10505) and [GDCC/dataverse-external-vocab-support documentation](https://github.com/gdcc/dataverse-external-vocab-support/tree/main/docs). + +- *Broader Support for Indexing Service Responses:* Indexing of the results from `retrieval-filtering` responses can now handle additional formats including JSON arrays of strings and values from arbitrary keys within a JSON Object. See [#10505](https://github.com/IQSS/dataverse/pull/10505). + +- *HTTP Headers:* You are now able to add HTTP request headers required by the service you are implementing. See [#10331](https://github.com/IQSS/dataverse/pull/10331). + +- *Flexible params in retrievalUri:* You can now use `managed-fields` field names as well as the `term-uri-field` field name as parameters in the `retrieval-uri` when configuring an external vocabulary service. `{0}` as an alternative to using the `term-uri-field` name is still supported for backward compatibility. Also you can specify if the value must be url encoded with `encodeUrl:`. See [#10404](https://github.com/IQSS/dataverse/pull/10404). 
For example: `"retrieval-uri": "https://data.agroportal.lirmm.fr/ontologies/{keywordVocabulary}/classes/{encodeUrl:keywordTermURL}"` + +- *Hidden HTML Fields:* External controlled vocabulary scripts
For example, two new filenames are added into that file: +``` +ro-crate-metadata.json=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" +ro-crate-metadata.jsonld=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" +``` + +Therefore, files named `ro-crate-metadata.json` will be then detected as RO-Crated metadata files from now on, instead as generic `JSON` files. +For more information on the RO-Crate specifications, see https://www.researchobject.org/ro-crate + +See also [#10015](https://github.com/IQSS/dataverse/pull/10015). + +### New S3 Tagging Configuration Option + +If your S3 store does not support tagging and gives an error if you configure direct upload, you can disable the tagging by using the `dataverse.files..disable-tagging` JVM option. For more details, see the section on [S3 tags](https://guides.dataverse.org/en/6.3/developers/big-data-support.html#s3-tags) in the guides, #10022 and #10029. + +### Feature Flag To Remove the Required "Reason" Field in the "Return to Author" Dialog + +A reason field, that is required to not be empty, was added to the "Return to Author" dialog in v6.2. Installations that handle author communications through email or another system may prefer to not be required to use this new field. v6.3 includes a new +disable-return-to-author-reason feature flag that can be enabled to drop the reason field from the dialog and make sending a reason optional in the api/datasets/{id}/returnToAuthor call. See also #10655. + +### Improved Use of Dataverse Thumbnail + +Dataverse will use the dataset thumbnail, if one is defined, rather than the generic Dataverse logo in the Open Graph metadata header. This means the image will be seen when, for example, the dataset is referenced on Facebook. See also [#5621](https://github.com/IQSS/dataverse/pull/5621). 
+ +### Improved Email Notifications When Guestbook is Used for File Access Requests + +Multiple improvements to guestbook response emails making it easier to organize and process them. The subject line of the notification email now includes the name and user identifier of the requestor. Additionally, the body of the email now includes the user id of the requestor. Finally the guestbook responses have been sorted and spaced to improve readability. See also [#10581](https://github.com/IQSS/dataverse/issues/10581). + +### New keywordTermURI Metadata Field in the Citation Metadata Block + +A new metadata field - `keywordTermURI`, has been added in the citation metadata block (as a fourth child field under the `keyword` parent field). This has been done to improve usability and to facilitate the integration of controlled vocabulary services, adding the possibility of saving the "term" and/or its associated URI. For more information, see #10288 and PR #10371. + +### Updated Computational Workflow Metadata Block + +The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. See also [#10339](https://github.com/IQSS/dataverse/pull/10339). + +### Metadata Source Facet Added + +An option has been added to index the name of the harvesting client as the "Metadata Source" of harvested datasets and files; if enabled, the Metadata Source facet will show separate entries for the content harvested from different sources, instead of the current, default behavior where there is one "Harvested" facet for all such content. + +Tho enable this feature, set the optional feature flage (jvm option) `dataverse.feature.index-harvested-metadata-source=true` before reindexing. + +See also [#10611](https://github.com/IQSS/dataverse/pull/10611) and #10651. 
+ +### Additional Facet Settings + +Extra settings have been added giving an instance admin more choices in selectively limiting the availability of search facets on the collection and dataset pages. + +See [Disable Solr Facets](https://guides.dataverse.org/en/6.3/installation/config.html#DisableSolrFacets) under the configuration section of the Installation Guide for more info as well as [#10570](https://github.com/IQSS/dataverse/pull/10570). + +### Sitemap Now Supports More Than 50k Items + +Dataverse can now handle more than 50,000 items when generating sitemap files, splitting the content across multiple files to comply with the Sitemap protocol. For details, see the [sitemap section](https://guides.dataverse.org/en/6.3/installation/config.html#creating-a-sitemap-and-submitting-it-to-search-engines) of the Installation Guide. See also [#8936](https://github.com/IQSS/dataverse/pull/8936) and [#10321](https://github.com/IQSS/dataverse/pull/10321). + +### MIT and Apache 2.0 Licenses Added + +New files have been added to import the MIT and Apache 2.0 Licenses to Dataverse: + +- licenseMIT.json +- licenseApache-2.0.json + +Guidance has been added to the [guides](https://guides.dataverse.org/en/6.2/installation/config.html#adding-custom-licenses) to explain the procedure for adding new licenses to Dataverse. + +See also [#10425](https://github.com/IQSS/dataverse/pull/10425). + +### 3D Viewer by Open Forest Data + +3DViewer by openforestdata.pl has been added to the [list of external tools](https://guides.dataverse.org/en/6.3/admin/external-tools.html#inventory-of-external-tools). See also [#10561](https://github.com/IQSS/dataverse/pull/10561). + +### Datalad Integration With Dataverse + +DataLad has been integrated with Dataverse. For more information, see the [integrations](https://guides.dataverse.org/en/6.3/admin/integrations.html#datalad) section of the guides. See also [#10468](https://github.com/IQSS/dataverse/pull/10468). 
+ +### Rsync Support Has Been Deprecated + +Support for rsync has been deprecated. Information has been removed from the guides for rsync and related software such as Data Capture Module (DCM) and Repository Storage Abstraction Layer (RSAL). You can still find this information in [older versions](https://guides.dataverse.org/en/6.2/developers/big-data-support.html#data-capture-module-dcm) of the guides. See [Settings](#database-settings), below, for deprecated settings. See also [#8985](https://github.com/IQSS/dataverse/pull/8985). + +[↑ Table of Contents](#table-of-contents) + +## Bug Fixes + +### OpenAPI Re-Enabled + +In Dataverse 6.0 when Payara was updated it caused the url `/openapi` to stop working: + +- https://github.com/IQSS/dataverse/issues/9981 +- https://github.com/payara/Payara/issues/6369 + +In addition to fixing the `/openapi` URL, we are also making some changes on how we provide the OpenAPI document: + +When it worked in Dataverse 5.x, the `/openapi` output was generated automatically by Payara, but in this release we have switched to OpenAPI output produced by the [SmallRye OpenAPI plugin](https://github.com/smallrye/smallrye-open-api/tree/main/tools/maven-plugin). This gives us finer control over the output. + +For more information, see the section on [OpenAPI](https://guides.dataverse.org/en/6.3/getting-started.html#openapi) in the API Guide and #10328. + +### Re-Addition of "Cell Counting" to Life Sciences Block + +In the Life Sciences metadata block under the "Measurement Type" field the value `cell counting` was accidentally removed in v5.1. It has been restored. See also #8655 and #9735. + +### Math Challenge Fixed on 403 Error Page + +On the "forbidden" (403) error page, the math challenge now correctly displays so that the contact form can be submitted. See also [#10466](https://github.com/IQSS/dataverse/pull/10466). 
+ +### Ingest Option Bug Fixed + +A bug that prevented the "Ingest" option in the file page "Edit File" menu from working has been fixed. See also [#10568](https://github.com/IQSS/dataverse/pull/10568). + +### Incomplete Metadata Bug Fix + +A bug was fixed where the `incomplete metadata` label was being shown for published dataset with incomplete metadata in certain scenarios. This label will now be shown for draft versions of such datasets and published datasets that the user can edit. This label can also be made invisible for published datasets (regardless of edit rights) with the new option ``dataverse.ui.show-validity-label-when-published`` set to `false`. See also [#10116](https://github.com/IQSS/dataverse/pull/10116). + +### Identical Role Error Message + +An error is now correctly reported when an attempt is made to assign an identical role to the same collection, dataset, or file. See also [#9729](https://github.com/IQSS/dataverse/pull/9729) and #10465. + +[↑ Table of Contents](#table-of-contents) + +## API + +### Superuser Endpoint + +The existing API endpoint for toggling the superuser status of a user has been deprecated in favor of a new API endpoint that allows you to explicitly and idempotently set the status as true or false. For details, see the [API Guide](https://guides.dataverse.org/en/6.3/api/native-api.html#set-superuser-status), [#9887](https://github.com/IQSS/dataverse/pull/9887) and [#10440](https://github.com/IQSS/dataverse/pull/10440). + +### New Featured Collections Endpoints + +New API endpoints have been added to allow you to add or remove featured collections from a collection. + +See also the sections on [listing, setting, and removing](https://guides.dataverse.org/en/6.3/api/native-api.html#list-featured-collections-for-a-dataverse-collection) featured collections in the API Guide, [#10242](https://github.com/IQSS/dataverse/pull/10242) and #10459. 
+ +### Dataset Version Endpoint Extended + +The API endpoint for getting the Dataset version has been extended to include latestVersionPublishingStatus. See also [#10330](https://github.com/IQSS/dataverse/pull/10330). + +### New Optional Query Parameters for Metadatablocks Endpoints + +New optional query parameters have been added to `api/metadatablocks` and `api/dataverses/{id}/metadatablocks` endpoints: + +- `returnDatasetFieldTypes`: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. +- Setting the query parameter `onlyDisplayedOnCreate=true` also returns metadata blocks with dataset field type input levels configured as required on the General Information page of the collection, in addition to the metadata blocks and their fields with the property ``displayOnCreate=true``. + +See also [#10389](https://github.com/IQSS/dataverse/pull/10389) + +### Dataverse Payload Includes Release Status + +The Dataverse object returned by /api/dataverses has been extended to include "isReleased": {boolean}. See also [#10491](https://github.com/IQSS/dataverse/pull/10491). + +### New Field Type Input Level Endpoint + +A new endpoint ``api/dataverses/{id}/inputLevels`` has been created for updating the dataset field type input levels of a collection via API. See also [#10477](https://github.com/IQSS/dataverse/pull/10477). + +### Banner Message Endpoint Extended + +The endpoint `api/admin/bannerMessage` has been extended so the ID is returned when created. See also [#10565](https://github.com/IQSS/dataverse/pull/10565). 
+ +[↑ Table of Contents](#table-of-contents) + +## Settings + +### Database Settings: + +***New:*** + +- :DisableSolrFacets + +***Deprecated (used with rsync):*** + +- :DataCaptureModuleUrl +- :DownloadMethods +- :LocalDataAccessPath +- :RepositoryStorageAbstractionLayerUrl + +### New Configuration Options + +- `dataverse.files..disable-tagging` +- `dataverse.feature.add-publicobject-solr-field` +- `dataverse.feature.avoid-expensive-solr-join` +- `dataverse.feature.reduce-solr-deletes` +- `dataverse.feature.disable-return-to-author-reason` +- `dataverse.feature.index-harvested-metadata-source` +- `dataverse.ui.show-validity-label-when-published` + +[↑ Table of Contents](#table-of-contents) + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.3 Milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.3+is%3Aclosed) in GitHub. + +[↑ Table of Contents](#table-of-contents) + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org. + +[↑ Table of Contents](#table-of-contents) + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.2. + +0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. 
For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara6` + +(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin undeploy dataverse-6.2` + +2\. Stop Payara and remove the following directories: + +```shell +service payara stop +rm -rf $PAYARA/glassfish/domains/domain1/generated +rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache +rm -rf $PAYARA/glassfish/domains/domain1/lib/databases +``` + +3\. Upgrade Payara to v6.2024.6 + +With this version of Dataverse, we encourage you to upgrade to version 6.2024.6. +This will address security issues accumulated since the release of 6.2023.8. + +Note that if you are using GDCC containers, this upgrade is included when pulling new release images. +No manual intervention is necessary. + +The steps below are a simple matter of reusing your existing domain directory with the new distribution. +But we recommend that you review the Payara upgrade instructions as it could be helpful during any troubleshooting: +[Payara Release Notes](https://docs.payara.fish/community/docs/Release%20Notes/Release%20Notes%206.2024.6.html). +We also recommend you ensure you followed all update instructions from the past releases regarding Payara. +(The latest Payara update was for [v6.0](https://github.com/IQSS/dataverse/releases/tag/v6.0).) 
+ +Move the current Payara directory out of the way: + +```shell +mv $PAYARA $PAYARA.6.2023.8 +``` + +Download the new Payara version 6.2024.6 (from https://www.payara.fish/downloads/payara-platform-community-edition/), and unzip it in its place: + +```shell +cd /usr/local +unzip payara-6.2024.6.zip +``` + +Replace the brand new `payara/glassfish/domains/domain1` with your old, preserved domain1: + +```shell +mv payara6/glassfish/domains/domain1 payara6/glassfish/domains/domain1_DIST +mv payara6-2023.8/glassfish/domains/domain1 payara6/glassfish/domains/ +``` + +Make sure that you have the following `--add-opens` options in your `payara6/glassfish/domains/domain1/config/domain.xml`. If not present, add them: + +``` +--add-opens=java.management/javax.management=ALL-UNNAMED +--add-opens=java.management/javax.management.openmbean=ALL-UNNAMED +[17|]--add-opens=java.base/java.io=ALL-UNNAMED +[21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED +``` + +(Note that you likely already have the `java.base/java.io` option there, but without the `[17|]` prefix. Make sure to replace it with the version above) + +Start Payara: + +```shell +sudo service payara start +``` + +4\. Deploy this version. + +```shell +$PAYARA/bin/asadmin deploy dataverse-6.3.war +``` + +5\. For installations with internationalization: + +- Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs). + +6\. Restart Payara + +```shell +service payara stop +service payara start +``` + +7\. 
Update the following metadata blocks to reflect the incremental improvements made to the handling of core metadata fields: + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv + +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/biomedical.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file biomedical.tsv + +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/computational_workflow.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file computational_workflow.tsv + +``` + +8\. Upgrade Solr + +Solr 9.4.1 is now the version recommended in our Installation Guide and used with automated testing. There is a known security issue in the previously recommended version 9.3.0: https://nvd.nist.gov/vuln/detail/CVE-2023-36478. While the risk of an exploit should not be significant unless the Solr instance is accessible from outside networks (which we have always recommended against), we recommend to upgrade. + +Install Solr 9.4.1 following the [instructions](https://guides.dataverse.org/en/6.3/installation/prerequisites.html#solr) from the Installation Guide. + +The instructions in the guide suggest to use the config files from the installer zip bundle. Upgrading an existing instance, it may be easier to download them from the source tree: + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/conf/solr/solrconfig.xml +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/conf/solr/schema.xml +cp solrconfig.xml schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +8a\. 
For installations with custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.3/installation/prerequisites.html#solr-init-script)). + +- Run the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the correct path of your Solr installation): + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/conf/solr/update-fields.sh +chmod +x update-fields.sh +curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +- Start Solr instance (usually `service solr start` depending on Solr/OS). + +9\. Enable the Metadata Source facet for harvested content (Optional): + +If you choose to enable this new feature, set the optional feature flag (jvm option) `dataverse.feature.index-harvested-metadata-source=true` before reindexing. + +10\. Reindex Solr, if you upgraded Solr (recommended), or chose to enable any options that require a reindex: + +```shell +curl http://localhost:8080/api/admin/index +``` + +Note: if you choose to perform a migration of your `keywordValue` metadata fields (section below), that will require a reindex as well, so do that first. + +## Notes for Dataverse Installation Administrators + +### Data migration to the new `keywordTermURI` field + +You can migrate your `keywordValue` data containing URIs to the new `keywordTermURI` field. 
+In case of data migration, view the affected data with the following database query: + +```sql +SELECT value FROM datasetfieldvalue dfv +INNER JOIN datasetfield df ON df.id = dfv.datasetfield_id +WHERE df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') +AND value ILIKE 'http%'; +``` + +If you wish to migrate your data, a database update is then necessary: + +```sql +UPDATE datasetfield df +SET datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordTermURI') +FROM datasetfieldvalue dfv +WHERE dfv.datasetfield_id = df.id +AND df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') +AND dfv.value ILIKE 'http%'; +``` + +A [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) will be required. ReExportAll will need to be run to update the metadata exports of the dataset. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/latest/admin/metadataexport.html#batch-exports-through-the-api). + +[↑ Table of Contents](#table-of-contents) diff --git a/doc/release-notes/6.4-release-notes.md b/doc/release-notes/6.4-release-notes.md new file mode 100644 index 00000000000..979fd16bf9e --- /dev/null +++ b/doc/release-notes/6.4-release-notes.md @@ -0,0 +1,526 @@ +# Dataverse 6.4 + +Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.4 rather than the list of releases, which will cut them off. + +This release brings new features, enhancements, and bug fixes to Dataverse. Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. 
+ +## Release Highlights + +New features in Dataverse 6.4: + +- Enhanced DataCite Metadata, including "Relation Type" +- All ISO 639-3 languages are now supported +- There is now a button for "Unlink Dataset" +- Users will have DOIs/PIDs reserved for their files as part of file upload instead of at publication time +- Datasets can now have types such as "software" or "workflow" +- Croissant support +- RO-Crate support +- and more! Please see below. + +New client library: + +- Rust + +This release also fixes two important bugs described below and in [a post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ) on the mailing list: + +- "Update Current Version" can cause metadata loss +- Publishing breaks designated dataset thumbnail, messes up collection page + +Additional details on the above as well as many more features and bug fixes included in the release are described below. Read on! + +## Features Added + +### Enhanced DataCite Metadata, Including "Relation Type" + +Within the "Related Publication" field, a new subfield has been added called "Relation Type" that allows for the most common [values](https://datacite-metadata-schema.readthedocs.io/en/4.5/appendices/appendix-1/relationType/) recommended by DataCite: isCitedBy, Cites, IsSupplementTo, IsSupplementedBy, IsReferencedBy, and References. For existing datasets where no "Relation Type" has been specified, "IsSupplementTo" is assumed. + +Dataverse now supports the [DataCite v4.5 schema](http://schema.datacite.org/meta/kernel-4/). Additional metadata is now being sent to DataCite including metadata about related publications and files in the dataset. Improved metadata is being sent including how PIDs (ORCID, ROR, DOIs, etc.), license/terms, geospatial, and other metadata are represented. The enhanced metadata will automatically be sent to DataCite when datasets are created and published. 
Additionally, after publication, you can inspect what was sent by looking at the DataCite XML export. + +The additions are in rough alignment with the OpenAIRE XML export, but there are some minor differences in addition to the Relation Type addition, including an update to the DataCite 4.5 schema. For details see #10632, #10615 and the [design document](https://docs.google.com/document/d/1JzDo9UOIy9dVvaHvtIbOI8tFU6bWdfDfuQvWWpC0tkA/edit?usp=sharing) referenced there. + +Multiple backward incompatible changes and bug fixes have been made to API calls (three of four of which were not documented) related to updating PID target URLs and metadata at the provider service: +- [Update Target URL for a Published Dataset at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-target-url-for-a-published-dataset-at-the-pid-provider) +- [Update Target URL for all Published Datasets at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-target-url-for-all-published-datasets-at-the-pid-provider) +- [Update Metadata for a Published Dataset at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-a-published-dataset-at-the-pid-provider) +- [Update Metadata for all Published Datasets at the PID provider](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider) + +### Full List of ISO 639-3 Languages Now Supported + +The controlled vocabulary values list for the metadata field "Language" in the citation block has now been extended to include roughly 7920 ISO 639-3 values. + +Some of the language entries in the pre-6.4 list correspond to "macro languages" in ISO-639-3 and admins/users may wish to update to use the corresponding individual language entries from ISO-639-3. 
As these cases are expected to be rare (they do not involve major world languages), finding them is not covered in the release notes. Anyone who desires help in this area is encouraged to reach out to the Dataverse community via any of the standard communication channels. + +ISO 639-3 codes were downloaded from [sil.org](https://iso639-3.sil.org/code_tables/download_tables#Complete%20Code%20Tables:~:text=iso%2D639%2D3_Code_Tables_20240415.zip) and the file used for merging with the existing citation.tsv was "iso-639-3.tab". See also #8578 and #10762. + +### Unlink Dataset Button + +A new "Unlink Dataset" button has been added to the dataset page to allow a user to unlink a dataset from a collection. To unlink a dataset the user must have permission to link the dataset. Additionally, the [existing API](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#unlink-a-dataset) for unlinking datasets has been updated to no longer require superuser access as the "Publish Dataset" permission is now enough. See also #10583 and #10689. + +### Pre-Publish File DOI Reservation + +Dataverse installations using DataCite as a persistent identifier (PID) provider (or other providers that support reserving PIDs) will be able to reserve PIDs for files when they are uploaded (rather than at publication time). Note that reserving file DOIs can slow uploads with large numbers of files so administrators may need to adjust timeouts (specifically any Apache "``ProxyPass / ajp://localhost:8009/ timeout=``" setting in the recommended Dataverse configuration). + +### Initial Support for Dataset Types + +Out of the box, all datasets now have the type "dataset" but superusers can add additional types. At this time the type of a dataset can only be set at creation time via API. The types "dataset", "software", and "workflow" (just those three, for now) will be sent to DataCite (as `resourceTypeGeneral`) when the dataset is published. 
+
+For details see [the guides](https://guides.dataverse.org/en/6.4/user/dataset-management.html#dataset-types), #10517 and #10694. Please note that this feature is highly experimental and is expected to [evolve](https://github.com/IQSS/dataverse-pm/issues/307).
+
+### Croissant Support (Metadata Export)
+
+A new metadata export format called [Croissant](https://github.com/mlcommons/croissant) is now available as an external metadata exporter. It is oriented toward making datasets consumable by machine learning.
+
+For more about the Croissant exporter, including installation instructions, see https://github.com/gdcc/exporter-croissant. See also #10341, #10533, and [discussion](https://groups.google.com/g/dataverse-community/c/JI8HPgGarr8/m/DqEIkiwlAgAJ) on the mailing list.
+
+Please note: the Croissant exporter works best with Dataverse 6.2 and higher (where it updates the content of `<head>` as [described](https://guides.dataverse.org/en/6.4/admin/discoverability.html#schema-org-head) in the guides) but can be used with 6.0 and higher (to get the export functionality).
+
+### RO-Crate Support (Metadata Export)
+
+Dataverse now supports [RO-Crate](https://www.researchobject.org/ro-crate/) as a metadata export format. This functionality is not available out of the box, but you can enable one or more RO-Crate exporters from the [list of external exporters](https://guides.dataverse.org/en/6.4/installation/advanced.html#inventory-of-external-exporters). See also #10744 and #10796.
+
+### Rust API Client Library
+
+A Dataverse API client library for the Rust programming language is now available at https://github.com/gdcc/rust-dataverse and has been added to the [list of client libraries](https://guides.dataverse.org/en/6.4/api/client-libraries.html) in the API Guide. See also #10758.
+
+### Collection Thumbnail Logo for Featured Collections
+
+Collections can now have a thumbnail logo that is displayed when the collection is configured as a featured collection. If present, this thumbnail logo is shown.
Otherwise, the collection logo is shown. Configuration is done under the "Theme" for a collection as explained in [the guides](https://guides.dataverse.org/en/6.4/user/dataverse-management.html#theme). See also #10291 and #10433. + +### Saved Searches Can Be Deleted + +Saved searches can now be deleted via API. See the [Saved Search](https://guides.dataverse.org/en/6.4/api/native-api.html#saved-search) section of the API Guide, #9317 and #10198. + +### Notification Email Improvement + +When notification emails are sent the part of the closing that says "contact us for support at" will now show the support email address (`dataverse.mail.support-email`), when configured, instead of the default system email address. Using the system email address here was particularly problematic when it was a "noreply" address. See also #10287 and #10504. + +### Ability to Disable Automatic Thumbnail Selection + +It is now possible to turn off the feature that automatically selects one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by enabling the [feature flag](https://guides.dataverse.org/en/6.4/installation/config.html#feature-flags) `dataverse.feature.disable-dataset-thumbnail-autoselect`. When the feature is disabled, a user can still manually pick a thumbnail image, or upload a dedicated thumbnail image. See also #10820. + +### More Flexible PermaLinks + +The configuration setting `dataverse.pid.*.permalink.base-url`, which is used for PermaLinks, has been updated to support greater flexibility. Previously, the string `/citation?persistentId=` was automatically appended to the configured base URL. With this update, the base URL will now be used exactly as configured, without any automatic additions. See also #10775. + +### Globus Async Framework + +A new alternative implementation of Globus polling during upload data transfers has been added in this release. 
This experimental framework does not rely on the instance staying up continuously for the duration of the transfer and saves the state information about Globus upload requests in the database. See `globus-use-experimental-async-framework` under [Feature Flags](https://guides.dataverse.org/en/6.4/installation/config.html#feature-flags) and [dataverse.files.globus-monitoring-server](https://guides.dataverse.org/en/6.4/installation/config.html#dataverse-files-globus-monitoring-server) in the Installation Guide. See also #10623 and #10781.
+
+### CVoc (Controlled Vocabulary): Allow ORCID and ROR to Be Used Together in Author Field
+
+Changes in Dataverse and updates to the ORCID and ROR external vocabulary scripts support deploying these for the citation block author field (and others). See also #10711 and #10712.
+
+### Development on Windows
+
+New instructions have been added for developers on Windows trying to run a Dataverse development environment using Windows Subsystem for Linux (WSL). See [the guides](https://guides.dataverse.org/en/6.4/developers/windows.html), #10606, and #10608.
+
+### Experimental Crossref PID (DOI) Provider
+
+Crossref can now be used as a PID (DOI) provider, but this feature is experimental. Please provide feedback through the usual channels. See also the [guides](https://guides.dataverse.org/en/6.4/installation/config.html#crossref-specific-settings), #8581, and #10806.
+
+### Improved JSON Schema Validation for Datasets
+
+JSON Schema validation has been enhanced with checks for required and allowed child objects as well as type checking for field types including `primitive`, `compound` and `controlledVocabulary`. More user-friendly error messages help pinpoint the issues in the dataset JSON. See [Retrieve a Dataset JSON Schema for a Collection](https://guides.dataverse.org/en/6.4/api/native-api.html#retrieve-a-dataset-json-schema-for-a-collection) in the API Guide, #10169, and #10543.
+ +### Counter Processor 1.05 Support (Make Data Count) + +Counter Processor 1.05 is now supported for use with Make Data Count. If you are running Counter Processor, you should reinstall/reconfigure it as described in the latest guides. Note that Counter Processor 1.05 requires Python 3, so you will need to follow the full Counter Processor install. Also note that if you configure the new version the same way, it will reprocess the days in the current month when it is first run. This is normal and will not affect the metrics in Dataverse. See also #10479. + +### Version Tags for Container Base Images + +With this release we introduce a detailed maintenance workflow for our container images. As output of the [Containerization Working Group](https://ct.gdcc.io), the community takes another step towards production ready containers available directly from the core project. + +The maintenance workflow regularly updates the [Container Base Image](https://guides.dataverse.org/en/6.4/container/base-image.html), which contains the operating system, Java, Payara, and tools and libraries required by the Dataverse application. Shipping these rolling releases as well as immutable revisions is the foundation for secure and reliable [Dataverse Application Container](https://guides.dataverse.org/en/6.4/container/app-image.html) images. See also #10478 and #10827. + +## Bugs Fixed + +### Update Current Version + +A significant bug in the superuser-only [Update Current Version](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#make-metadata-updates-without-changing-dataset-version) publication option was fixed. If the "Update Current Version" option was used when changes were made to the dataset terms (rather than to dataset metadata) or if the PID provider service was down or returned an error, the update would fail and render the dataset unusable and require restoration from a backup. 
The fix in this release allows the update to succeed in both of these cases and redesigns the functionality such that any unknown issues should not make the dataset unusable (i.e. the error would be reported and the dataset would remain in its current state with the last-published version as it was and changes still in the draft version.)
+
+If you do not plan to upgrade to Dataverse 6.4 right away, you are encouraged to alert your superusers to this issue (see [this post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ)). Here are some workarounds for pre-6.4 versions:
+
+* Change the "dataset.updateRelease" entry in the Bundle.properties file (or local language version) to "Do Not Use" or similar (this doesn't disable the button but alerts superusers to the issue), or
+* Edit the dataset.xhtml file to remove the lines below, delete the contents of the generated and osgi-cache directories in the Dataverse Payara domain, and restart the Payara server. This will remove the "Update Current Version" option from the UI.
+
+```
+
+
+
+```
+
+Again, the workarounds above are only for pre-6.4 versions. The bug has been fixed in Dataverse 6.4. See also #10797.
+
+### Broken Thumbnails
+
+Dataverse 6.3 introduced a bug where publishing would break the dataset thumbnail, which in turn broke the rendering of the parent collection (dataverse) page.
+
+This bug has been fixed but any existing broken thumbnails must be fixed manually. See "clearThumbnailFailureFlag" in the upgrade instructions below.
+
+Additionally, it is now possible to turn off the feature that automatically selects one of the image datafiles to serve as the thumbnail of the parent dataset. An admin can turn it off by raising the feature flag `-Ddataverse.feature.disable-dataset-thumbnail-autoselect=true`. When the feature is disabled, a user can still manually pick a thumbnail image, or upload a dedicated thumbnail image.
+
+See also #10819, #10820, and [the post](https://groups.google.com/g/dataverse-community/c/evn5C-pyrS8/m/JrH9vp47DwAJ) on the mailing list.
+
+### No License, No Terms of Use
+
+When datasets have neither a license nor custom terms of use, the dataset page will now indicate this. Also, these datasets will no longer be indexed as having custom terms. See also #8796, #10513, and #10614.
+
+### CC0 License Bug Fix
+
+At a high level, some datasets have been mislabeled as "Custom License" when they should have been "CC0 1.0". This has been corrected.
+
+In Dataverse 5.10, datasets with only "CC0 Waiver" in the "termsofuse" field were converted to "Custom License" (instead of the CC0 1.0 license) through a SQL migration script (see #10634). On deployment of Dataverse 6.4, a new SQL migration script will be run automatically to correct this, changing these datasets to CC0. You can review the script in #10634, which only affects the following datasets:
+
+- The existing "Terms of Use" must be equal to "This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver" (this was set in #10634).
+- The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer.
+- The license ID must not be assigned.
+
+The script will set the license ID to that of the CC0 1.0 license and remove the contents of "termsofuse" field. See also #9081 and #10634.
+
+### Remap oai_dc Export and Harvesting Format Fields: dc:type and dc:date
+
+The `oai_dc` export and harvesting format has had the following fields remapped:
+
+- dc:type was mapped to the field "Kind of Data". Now it is hard-coded to the word "Dataset".
+- dc:date was mapped to the field "Production Date" when available and otherwise to "Publication Date".
Now it is mapped to the field "Publication Date" or the field used for the citation date, if set (see [Set Citation Date Field Type for a Dataset](https://guides.dataverse.org/en/6.4/api/native-api.html#set-citation-date-field-type-for-a-dataset)).
+
+In order for these changes to be reflected in existing datasets, a [reexport all](https://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api) should be run (mentioned below). See #8129 and #10737.
+
+### Zip File No Longer Misdetected as Shapefile (Hidden Directories)
+
+When detecting file types, Dataverse would previously detect a zip file as a shapefile if it contained [markers of a shapefile](https://guides.dataverse.org/en/6.4/developers/geospatial.html) in hidden directories. These hidden directories are now ignored when deciding if a zip file is a shapefile or not. See also #8945 and #10627.
+
+### External Controlled Vocabulary
+
+This release fixes a bug (introduced in v6.3) in the external controlled vocabulary mechanism that could cause indexing to fail (with a NullPointerException) when a script is configured for one child field and no other child fields were managed. See also #10869 and #10870.
+
+### Valid JSON in Error Response
+
+When any `ApiBlockingFilter` policy applies to a request, the JSON in the body of the error response is now valid JSON. See also #10085.
+
+### Docker Container Base Image Security and Compatibility
+
+- Switch "wait-for" to "wait4x", aligned with the Configbaker Image
+- Update "jattach" to v2.2
+- Install AMD64 / ARM64 versions of tools as necessary
+- Run base image as unprivileged user by default instead of `root` - this was an oversight from OpenShift changes
+- Linux User, Payara Admin and Domain Master passwords:
+ - Print hints about default, public knowledge passwords in place for
+ - Enable replacing these passwords at container boot time
+- Enable building with updated Temurin JRE image based on Ubuntu 24.04 LTS
+- Fix entrypoint script troubles with pre- and postboot script files
+- Unify location of files at CONFIG_DIR=/opt/payara/config, avoid writing to other places
+
+See also #10508, #10672 and #10722.
+
+### Cleanup of Temp Directories
+
+In this release we addressed an issue where copies of files uploaded via the UI were left in one specific temp directory (`.../domain1/uploads` by default). We would like to remind all the installation admins that it is strongly recommended to have some automated (and aggressive) cleanup mechanisms in place for all the temp directories used by Dataverse.
For example, at Harvard/IQSS we have the following configuration for the PrimeFaces uploads directory above: (note that, even with this fix in place, PrimeFaces will be leaving a large number of small log files in that location) + +Instead of the default location (`.../domain1/uploads`) we use a directory on a dedicated partition, outside of the filesystem where Dataverse is installed, via the following JVM option: + +``` +-Ddataverse.files.uploads=/uploads/web +``` + +and we have a dedicated cronjob that runs every 30 minutes and deletes everything older than 2 hours in that directory: + +``` +15,45 * * * * /bin/find /uploads/web/ -mmin +119 -type f -name "upload*" -exec rm -f {} \; > /dev/null 2>&1 +``` + +### Trailing Commas in Author Name Now Permitted + +When an author name ended in a comma (e.g. `Smith,` or `Smith, `), the dataset page was broken after publishing (a "500" error page was presented to the user). The underlying issue causing the JSON-LD Schema.org output on the page to break was fixed. See #10343 and #10776. + +## API Updates + +### Search API: affiliation, parentDataverseName, image_url, etc. + +The Search API (`/api/search`) response now includes additional fields, depending on the type. + +For collections (dataverses): + +- "affiliation" +- "parentDataverseName" +- "parentDataverseIdentifier" +- "image_url" (optional) + +```javascript +"items": [ + { + "name": "Darwin's Finches", + ... + "affiliation": "Dataverse.org", + "parentDataverseName": "Root", + "parentDataverseIdentifier": "root", + "image_url":"/api/access/dvCardImage/{identifier}" +(etc, etc) +``` + +For datasets: + +- "image_url" (optional) + +```javascript +"items": [ + { + ... + "image_url": "http://localhost:8080/api/datasets/2/logo" + ... +(etc, etc) +``` + +For files: + +- "releaseOrCreateDate" +- "image_url" (optional) + +```javascript +"items": [ + { + "name": "test.png", + ... 
+ "releaseOrCreateDate": "2016-05-10T12:53:39Z", + "image_url":"/api/access/datafile/42?imageThumb=true" +(etc, etc) +``` + +These examples are also shown in the [Search API](https://guides.dataverse.org/en/6.4/api/search.html) section of the API Guide. + +The image_url field was already part of the SolrSearchResult JSON (and incorrectly appeared in Search API documentation), but it wasn't returned by the API because it was appended only after the Solr query was executed in the SearchIncludeFragment of JSF (the old/current UI framework). Now, the field is set in SearchServiceBean, ensuring it is always returned by the API when an image is available. + +The Solr schema.xml file has been updated to include a new field called "dvParentAlias" for supporting the new response field "parentDataverseIdentifier". See upgrade instructions below. + +See also #10810 and #10811. + +### Search API: publicationStatuses + +The Search API (`/api/search`) response will now include publicationStatuses in the JSON response as long as the list is not empty. + +Example: + +```javascript +"items": [ + { + "name": "Darwin's Finches", + ... + "publicationStatuses": [ + "Unpublished", + "Draft" + ], +(etc, etc) +``` + +See also #10733 and #10738. + +### Search Facet Information Exposed + +A new endpoint `/api/datasetfields/facetables` lists all facetable dataset fields defined in the installation, as described in [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-all-facetable-dataset-fields). + +A new optional query parameter "returnDetails" added to `/api/dataverses/{identifier}/facets/` endpoint to include detailed information of each DataverseFacet, as described in [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-facets-configured-for-a-dataverse-collection). See also #10726 and #10727. 
+
+### User Permissions on Collections
+
+A new endpoint at `/api/dataverses/{identifier}/userPermissions` for obtaining the user permissions on a collection (dataverse) has been added. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#get-user-permissions-on-a-dataverse), #10749 and #10751.
+
+### addDataverse Extended
+
+The addDataverse (`/api/dataverses/{identifier}`) API endpoint has been extended to allow adding metadata blocks, input levels and facet IDs at creation time, as the Dataverse page in create mode does in JSF. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#create-a-dataverse-collection), #10633 and #10644.
+
+### Metadata Blocks and Display on Create
+
+The `/api/dataverses/{identifier}/metadatablocks` endpoint has been fixed to not return fields marked as displayOnCreate=true if there is an input level with include=false, when query parameters returnDatasetFieldTypes=true and onlyDisplayedOnCreate=true are set. See also #10741 and #10767.
+
+The fields "depositor" and "dateOfDeposit" in the citation.tsv metadata block file have been updated to have the property "displayOnCreate" set to TRUE. In practice, only the API is affected because the UI has special logic that already shows these fields when datasets are created. See also #10850 and #10884.
+
+### Feature Flags Can Be Listed
+
+It is now possible to list all feature flags and see if they are enabled or not. See also [the guides](https://guides.dataverse.org/en/6.4/api/native-api.html#list-all-feature-flags) and #10732.
+ +## Settings Added + +The following settings have been added: + +- dataverse.feature.disable-dataset-thumbnail-autoselect +- dataverse.feature.globus-use-experimental-async-framework +- dataverse.files.globus-monitoring-server +- dataverse.pid.*.crossref.url +- dataverse.pid.*.crossref.rest-api-url +- dataverse.pid.*.crossref.username +- dataverse.pid.*.crossref.password +- dataverse.pid.*.crossref.depositor +- dataverse.pid.*.crossref.depositor-email + +## Backward Incompatible Changes + +- The oai_dc export format has changed. See the "Remap oai_dc" section above. +- Several APIs related to DataCite have changed. See "More and Better Data Sent to DataCite" above. + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.4 milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.4+is%3Aclosed) in GitHub. + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org. + +## Installation + +If this is a new installation, please follow our [Installation Guide](https://guides.dataverse.org/en/latest/installation/). Please don't be shy about [asking for help](https://guides.dataverse.org/en/latest/installation/intro.html#getting-help) if you need it! + +Once you are in production, we would be delighted to update our [map of Dataverse installations](https://dataverse.org/installations) around the world to include yours! Please [create an issue](https://github.com/IQSS/dataverse-installations/issues) or email us at support@dataverse.org to join the club! + +You are also very welcome to join the [Global Dataverse Community Consortium](https://www.gdcc.io/) (GDCC). + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc. 
+
+These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.3.
+
+0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version.
+
+If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user.
+
+In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed.
+
+```shell
+export PAYARA=/usr/local/payara6
+```
+
+(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell)
+
+1\. Undeploy the previous version
+
+```shell
+$PAYARA/bin/asadmin undeploy dataverse-6.3
+```
+
+2\. Stop and start Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+3\. Deploy this version
+
+```shell
+$PAYARA/bin/asadmin deploy dataverse-6.4.war
+```
+
+Note: if you have any trouble deploying, stop Payara, remove the following directories, start Payara, and try to deploy again.
+
+```shell
+service payara stop
+rm -rf $PAYARA/glassfish/domains/domain1/generated
+rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache
+rm -rf $PAYARA/glassfish/domains/domain1/lib/databases
+```
+
+4\. For installations with internationalization:
+
+Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs).
+
+5\. Restart Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+6\. Update metadata blocks
+
+These changes reflect incremental improvements made to the handling of core metadata fields.
+ +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/scripts/api/data/metadatablocks/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv +``` + +7\. Update Solr schema.xml file. Start with the standard v6.4 schema.xml, then, if your installation uses any custom or experimental metadata blocks, update it to include the extra fields (step 7a). + +Stop Solr (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.4/installation/prerequisites.html#solr-init-script)). + +```shell +service solr stop +``` + +Replace schema.xml + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/schema.xml +cp schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +Start Solr (but if you use any custom metadata blocks, perform the next step, 7a first). + +```shell +service solr start +``` + +7a\. For installations with custom or experimental metadata blocks: + +Before starting Solr, update the schema to include all the extra metadata fields that your installation uses. We do this by collecting the output of the Dataverse schema API and feeding it to the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the names of the directories, if different): + +```shell + wget https://raw.githubusercontent.com/IQSS/dataverse/v6.4/conf/solr/update-fields.sh + chmod +x update-fields.sh + curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +Now start Solr. + +8\. Reindex Solr + +Below is the simplest way to reindex Solr: + +```shell +curl http://localhost:8080/api/admin/index +``` + +The API above rebuilds the existing index "in place". 
If you want to be absolutely sure that your index is up-to-date and consistent, you may consider wiping it clean and reindexing everything from scratch (see [the guides](https://guides.dataverse.org/en/latest/admin/solr-search-index.html)). Just note that, depending on the size of your database, a full reindex may take a while and the users will be seeing incomplete search results during that window.
+
+9\. Run reExportAll to update dataset metadata exports
+
+This step is necessary because of changes described above for the `Datacite` and `oai_dc` export formats.
+
+Below is the simple way to reexport all dataset metadata. For more advanced usage, please see [the guides](http://guides.dataverse.org/en/6.4/admin/metadataexport.html#batch-exports-through-the-api).
+
+```shell
+curl http://localhost:8080/api/admin/metadata/reExportAll
+```
+
+10\. Pushing updated metadata to DataCite
+
+(If you don't use DataCite, you can skip this.)
+
+Above you updated the citation metadata block and Solr with the new "relationType" field. With these two changes, the "Relation Type" fields will be available and creation/publication of datasets will result in the expanded XML being sent to DataCite. You've also already run "reExportAll" to update the `Datacite` metadata export format.
+
+Entries at DataCite for published datasets can be updated by a superuser using an API call (newly [documented](https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#update-metadata-for-all-published-datasets-at-the-pid-provider)):
+
+`curl -X POST -H 'X-Dataverse-key:$API_TOKEN' http://localhost:8080/api/datasets/modifyRegistrationPIDMetadataAll`
+
+This will loop through all published datasets (and released files with PIDs). As long as the loop completes, the call will return a 200/OK response.
Any PIDs for which the update fails can be found using the following command:
+
+`grep 'Failure for id' server.log`
+
+Failures may occur if PIDs were never registered, or if they were never made findable. Any such cases can be fixed manually in DataCite Fabrica or using the [Reserve a PID](https://guides.dataverse.org/en/6.4/api/native-api.html#reserve-a-pid) API call and the newly documented `/api/datasets/$DATASET_ID/modifyRegistration` call respectively. See https://guides.dataverse.org/en/6.4/admin/dataverses-datasets.html#send-dataset-metadata-to-pid-provider. Please reach out with any questions.
+
+PIDs can also be updated by a superuser on a per-dataset basis using
+
+`curl -X POST -H 'X-Dataverse-key:$API_TOKEN' http://localhost:8080/api/datasets/$DATASET_ID/modifyRegistrationMetadata`
+
+### Additional Upgrade Steps
+
+11\. If there are broken thumbnails
+
+To restore any broken thumbnails caused by the bug described above, you can call the `http://localhost:8080/api/admin/clearThumbnailFailureFlag` API, which will attempt to clear the flag on all files (regardless of whether caused by this bug or some other problem with the file) or the `http://localhost:8080/api/admin/clearThumbnailFailureFlag/$FILE_ID` to clear the flag for individual files. Calling the former, batch API is recommended.
+
+12\. PermaLinks with custom base-url
+
+If you currently use PermaLinks with a custom `base-url`: You must manually append `/citation?persistentId=` to the base URL to maintain functionality.
+
+If you use PermaLinks without a configured `base-url`, no changes are required.
diff --git a/doc/release-notes/8243-improve-language-controlled-vocab.md b/doc/release-notes/8243-improve-language-controlled-vocab.md deleted file mode 100644 index 15b2b46c02d..00000000000 --- a/doc/release-notes/8243-improve-language-controlled-vocab.md +++ /dev/null @@ -1,11 +0,0 @@ -The Controlled Vocabuary Values list for the metadata field Language in the Citation block has been improved, with some missing two- and three-letter ISO 639 codes added, as well as more alternative names for some of the languages, making all these extra language identifiers importable. - -To be added to the 6.3 release instructions: - -Update the Citation block, to incorporate the improved controlled vocabulary for language [plus whatever other improvements may be made to the block in other PRs]: - -``` -wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/citation.tsv -curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv -``` - diff --git a/doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md b/doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md deleted file mode 100644 index 295f206871f..00000000000 --- a/doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md +++ /dev/null @@ -1,12 +0,0 @@ -## Release Highlights - -### Life Science Metadata - -Re-adding value `cell counting` to Life Science metadatablock's Measurement Type vocabularies accidentally removed in `v5.1`. 
- -## Upgrade Instructions - -### Update the Life Science metadata block - -- `wget https://github.com/IQSS/dataverse/releases/download/v6.3/biomedical.tsv` -- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @biomedical.tsv -H "Content-type: text/tab-separated-values"` \ No newline at end of file diff --git a/doc/release-notes/8936-more-than-50000-entries-in-sitemap.md b/doc/release-notes/8936-more-than-50000-entries-in-sitemap.md deleted file mode 100644 index 7b367e328c1..00000000000 --- a/doc/release-notes/8936-more-than-50000-entries-in-sitemap.md +++ /dev/null @@ -1,11 +0,0 @@ -Dataverse can now handle more than 50,000 items when generating sitemap files, splitting the content across multiple files to comply with the Sitemap protocol. - -For details see https://dataverse-guide--10321.org.readthedocs.build/en/10321/installation/config.html#creating-a-sitemap-and-submitting-it-to-search-engines #8936 and #10321. - -## Upgrade instructions - -If your installation has more than 50,000 entries, you should re-submit your sitemap URL to Google or other search engines. The file in the URL will change from ``sitemap.xml`` to ``sitemap_index.xml``. - -As explained at https://dataverse-guide--10321.org.readthedocs.build/en/10321/installation/config.html#creating-a-sitemap-and-submitting-it-to-search-engines this is the command for regenerating your sitemap: - -`curl -X POST http://localhost:8080/api/admin/sitemap` diff --git a/doc/release-notes/8985-deprecate-rsync.md b/doc/release-notes/8985-deprecate-rsync.md deleted file mode 100644 index 44563f292fd..00000000000 --- a/doc/release-notes/8985-deprecate-rsync.md +++ /dev/null @@ -1,8 +0,0 @@ -Support for rsync has been deprecated. Information has been removed from the guides for rsync and related software such as Data Capture Module (DCM) and Repository Storage Abstraction Layer (RSAL). 
You can still find this information in [older versions](https://guides.dataverse.org/en/6.2/developers/big-data-support.html#data-capture-module-dcm) of the guides. - -The following related database settings have been deprecated as well: - -- :DataCaptureModuleUrl -- :DownloadMethods -- :LocalDataAccessPath -- :RepositoryStorageAbstractionLayerUrl diff --git a/doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md b/doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md deleted file mode 100644 index 5e18007e8ae..00000000000 --- a/doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md +++ /dev/null @@ -1,14 +0,0 @@ -## Release Highlights - -### Updates on Support for External Vocabulary Services - -#### HTTP Headers - -You are now able to add HTTP request headers required by the service you are implementing (#10331) - -#### Flexible params in retrievalUri - -You can now use `managed-fields` field names as well as the `term-uri-field` field name as parameters in the `retrieval-uri` when configuring an external vocabulary service. `{0}` as an alternative to using the `term-uri-field` name is still supported for backward compatibility. -Also you can specify if the value must be url encoded with `encodeUrl:`. (#10404) - -For example : `"retrieval-uri": "https://data.agroportal.lirmm.fr/ontologies/{keywordVocabulary}/classes/{encodeUrl:keywordTermURL}"` \ No newline at end of file diff --git a/doc/release-notes/9276-doc-cvoc-index-in.md b/doc/release-notes/9276-doc-cvoc-index-in.md deleted file mode 100644 index 78289201511..00000000000 --- a/doc/release-notes/9276-doc-cvoc-index-in.md +++ /dev/null @@ -1,18 +0,0 @@ -## Release Highlights - -### Updates on Support for External Vocabulary Services - -Multiple extensions of the External Vocabulary mechanism have been added. These extensions allow interaction with services based on the Ontoportal software and are expected to be generally useful for other service types. 
- -These changes include: - -#### Improved Indexing with Compound Fields - -When using an external vocabulary service with compound fields, you can now specify which field(s) will include additional indexed information, such as translations of an entry into other languages. This is done by adding the `indexIn` in `retrieval-filtering`. (#10505) -For more information, please check [GDCC/dataverse-external-vocab-support documentation](https://github.com/gdcc/dataverse-external-vocab-support/tree/main/docs). - -#### Broader Support for Indexing Service Responses - -Indexing of the results from `retrieval-filtering` responses can now handle additional formats including Json Arrays of Strings and values from arbitrary keys within a JSON Object. (#10505) - -**** This documentation must be merged with 9276-allow-flexible-params-in-retrievaluri-cvoc.md (#10404) \ No newline at end of file diff --git a/doc/release-notes/9375-retention-period.md b/doc/release-notes/9375-retention-period.md deleted file mode 100644 index a088cabf138..00000000000 --- a/doc/release-notes/9375-retention-period.md +++ /dev/null @@ -1,8 +0,0 @@ -The Dataverse Software now supports file-level retention periods. The ability to set retention periods, with a minimum duration (in months), can be configured by a Dataverse installation administrator. For more information, see the [Retention Periods section](https://guides.dataverse.org/en/6.3/user/dataset-management.html#retention-periods) of the Dataverse Software Guides. - -- Users can configure a specific retention period, defined by an end date and a short reason, on a set of selected files or an individual file, by selecting the 'Retention Period' menu item and entering information in a popup dialog. Retention Periods can only be set, changed, or removed before a file has been published. After publication, only Dataverse installation administrators can make changes, using an API. 
-
-- After the retention period expires, files can not be previewed or downloaded (as if restricted, with no option to allow access requests). The file (landing) page and all the metadata remains available.
-
-
-Release notes should mention that a Solr schema update is needed.
diff --git a/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md
new file mode 100644
index 00000000000..344859e2dbd
--- /dev/null
+++ b/doc/release-notes/9408-fix-facets-labels-not-translated-in-result-block.md
@@ -0,0 +1,7 @@
+## Fix facets filter labels not translated in result block
+
+On the main page, it's possible to filter results using search facets. If internationalization (i18n) has been activated in the Dataverse installation, allowing pages to be displayed in several languages, the facets are translated in the filter column. However, they aren't translated in the search results and remain in the default language, English.
+
+This version of Dataverse fixes this, and includes internationalization in the facets visible in the search results section.
+
+For more information, see issue [#9408](https://github.com/IQSS/dataverse/issues/9408) and pull request [#10158](https://github.com/IQSS/dataverse/pull/10158)
diff --git a/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md b/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md
new file mode 100644
index 00000000000..8c79955891b
--- /dev/null
+++ b/doc/release-notes/9650-5-improve-list-linked-dataverses-API.md
@@ -0,0 +1,5 @@
+The following API has been added:
+
+/api/datasets/{datasetId}/links
+
+It lists the dataverses linked to a dataset. It can be executed only by administrators.
\ No newline at end of file diff --git a/doc/release-notes/9729-release-notes.md b/doc/release-notes/9729-release-notes.md deleted file mode 100644 index 9dc27995405..00000000000 --- a/doc/release-notes/9729-release-notes.md +++ /dev/null @@ -1 +0,0 @@ -An error is now correctly reported when an attempt is made to assign an identical role to the same collection, dataset, or file. #9729 #10465 \ No newline at end of file diff --git a/doc/release-notes/9739-url-validator.md b/doc/release-notes/9739-url-validator.md deleted file mode 100644 index ad149c54459..00000000000 --- a/doc/release-notes/9739-url-validator.md +++ /dev/null @@ -1,7 +0,0 @@ -## Release Highlights - -### URL validation is more permissive - -Url validation now allows two slashes in the path component of the URL. (#9750) -Among other things, this allows metadata fields of `url` type to be filled with more complex url such as https://archive.softwareheritage.org/browse/directory/561bfe6698ca9e58b552b4eb4e56132cac41c6f9/?origin_url=https://github.com/gem-pasteur/macsyfinder&revision=868637fce184865d8e0436338af66a2648e8f6e1&snapshot=1bde3cb370766b10132c4e004c7cb377979928d1 - diff --git a/doc/release-notes/9887-new-superuser-status-endpoint.md b/doc/release-notes/9887-new-superuser-status-endpoint.md deleted file mode 100644 index 01b1f539f7a..00000000000 --- a/doc/release-notes/9887-new-superuser-status-endpoint.md +++ /dev/null @@ -1 +0,0 @@ -The existing API endpoint for toggling the superuser status of a user has been deprecated in favor of a new API endpoint that allows you to explicitly and idempotently set the status as true or false. For details, see [the guides](https://dataverse-guide--10440.org.readthedocs.build/en/10440/api/native-api.html), #9887 and #10440. 
\ No newline at end of file diff --git a/doc/release-notes/solr-9.4.1.md b/doc/release-notes/solr-9.4.1.md deleted file mode 100644 index 13624a272ab..00000000000 --- a/doc/release-notes/solr-9.4.1.md +++ /dev/null @@ -1,14 +0,0 @@ -Solr 9.4.1 is now the version recommended in our installation guides and used with automated testing. There is a known security issue in the previously recommended version 9.3.0: https://nvd.nist.gov/vuln/detail/CVE-2023-36478. While the risk of an exploit should not be significant unless the Solr instance is accessible from the outside networks (which we have always recommended against), existing Dataverse installations should consider upgrading. - -For the upgrade instructions section: - -[note that 6.3 will contain other solr-related changes, so the instructions may need to contain information merged from multiple release notes!] - -If you are upgrading Solr: - - Install solr-9.4.1 following the instructions from the Installation guide. - - Run a full reindex to populate the search catalog. - - Note that it may be possible to skip the reindexing step by simply moving the existing `.../server/solr/collection1/` under the new `solr-9.4.1` installation directory. This however has not been thoroughly tested and is not officially supported. 
- - - - diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.json b/doc/sphinx-guides/source/_static/api/dataset-create-software.json new file mode 100644 index 00000000000..4c649bff0aa --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.json @@ -0,0 +1,82 @@ +{ + "datasetType": "software", + "datasetVersion": { + "license": { + "name": "CC0 1.0", + "uri": "http://creativecommons.org/publicdomain/zero/1.0" + }, + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "pyDataverse", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Range, Jan", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "University of Stuttgart", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { "datasetContactEmail" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value" : "jan@mailinator.com" + }, + "datasetContactName" : { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Range, Jan" + } + }], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ { + "dsDescriptionValue":{ + "value": "A Python module for Dataverse.", + "multiple":false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + }}], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": [ + "Computer and Information Science" + ], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} diff --git a/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld 
b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld new file mode 100644 index 00000000000..6f072967dc8 --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataset-create-software.jsonld @@ -0,0 +1,16 @@ +{ + "http://purl.org/dc/terms/title": "Darwin's Finches", + "http://purl.org/dc/terms/subject": "Medicine, Health and Life Sciences", + "http://purl.org/dc/terms/creator": { + "https://dataverse.org/schema/citation/authorName": "Finch, Fiona", + "https://dataverse.org/schema/citation/authorAffiliation": "Birds Inc." + }, + "https://dataverse.org/schema/citation/datasetContact": { + "https://dataverse.org/schema/citation/datasetContactEmail": "finch@mailinator.com", + "https://dataverse.org/schema/citation/datasetContactName": "Finch, Fiona" + }, + "https://dataverse.org/schema/citation/dsDescription": { + "https://dataverse.org/schema/citation/dsDescriptionValue": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds." 
+ }, + "https://dataverse.org/schema/core#datasetType": "software" +} diff --git a/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json new file mode 100644 index 00000000000..fef32aa1e2c --- /dev/null +++ b/doc/sphinx-guides/source/_static/api/dataverse-complete-optional-params.json @@ -0,0 +1,65 @@ +{ + "name": "Scientific Research", + "alias": "science", + "dataverseContacts": [ + { + "contactEmail": "pi@example.edu" + }, + { + "contactEmail": "student@example.edu" + } + ], + "affiliation": "Scientific Research University", + "description": "We do all the science.", + "dataverseType": "LABORATORY", + "metadataBlocks": { + "metadataBlockNames": [ + "citation", "geospatial" + ], + "inputLevels": [ + { + "datasetFieldTypeName": "geographicCoverage", + "include": true, + "required": true + }, + { + "datasetFieldTypeName": "country", + "include": true, + "required": true + }, + { + "datasetFieldTypeName": "geographicUnit", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "geographicBoundingBox", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "westLongitude", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "eastLongitude", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "northLatitude", + "include": false, + "required": false + }, + { + "datasetFieldTypeName": "southLatitude", + "include": false, + "required": false + } + ], + "facetIds": [ + "authorName", "authorAffiliation" + ] + } +} diff --git a/doc/sphinx-guides/source/_static/util/counter_daily.sh b/doc/sphinx-guides/source/_static/util/counter_daily.sh index 674972b18f2..5095a83b7e2 100644 --- a/doc/sphinx-guides/source/_static/util/counter_daily.sh +++ b/doc/sphinx-guides/source/_static/util/counter_daily.sh @@ -1,6 +1,6 @@ #! 
/bin/bash -COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-0.1.04" +COUNTER_PROCESSOR_DIRECTORY="/usr/local/counter-processor-1.05" MDC_LOG_DIRECTORY="/usr/local/payara6/glassfish/domains/domain1/logs/mdc" # counter_daily.sh diff --git a/doc/sphinx-guides/source/_templates/navbar.html b/doc/sphinx-guides/source/_templates/navbar.html index c7b81dcb937..d88306be8ae 100644 --- a/doc/sphinx-guides/source/_templates/navbar.html +++ b/doc/sphinx-guides/source/_templates/navbar.html @@ -25,7 +25,6 @@